-rw-r--r--.coveragerc2
-rw-r--r--.flake8  14
-rw-r--r--.gitignore15
-rw-r--r--.mailmap76
-rw-r--r--.travis.yml91
-rw-r--r--README.md22
-rwxr-xr-xbin/sbang19
-rwxr-xr-xbin/spack110
-rw-r--r--etc/spack/defaults/config.yaml68
-rw-r--r--etc/spack/defaults/darwin/packages.yaml18
-rw-r--r--etc/spack/defaults/modules.yaml42
-rw-r--r--etc/spack/defaults/packages.yaml23
-rw-r--r--etc/spack/defaults/repos.yaml14
-rw-r--r--etc/spack/modules.yaml29
-rw-r--r--etc/spack/repos.yaml8
-rw-r--r--lib/spack/docs/.gitignore1
-rw-r--r--lib/spack/docs/Makefile42
-rw-r--r--lib/spack/docs/_gh_pages_redirect/.nojekyll0
-rw-r--r--lib/spack/docs/_gh_pages_redirect/index.html10
-rw-r--r--lib/spack/docs/basic_usage.rst1204
-rw-r--r--lib/spack/docs/build_settings.rst168
-rw-r--r--lib/spack/docs/command_index.in7
-rw-r--r--lib/spack/docs/conf.py80
-rw-r--r--lib/spack/docs/config_yaml.rst149
-rw-r--r--lib/spack/docs/configuration.rst253
-rw-r--r--lib/spack/docs/contribution_guide.rst522
-rw-r--r--lib/spack/docs/developer_guide.rst219
-rw-r--r--lib/spack/docs/exts/sphinxcontrib/__init__.py24
-rw-r--r--lib/spack/docs/exts/sphinxcontrib/programoutput.py24
-rw-r--r--lib/spack/docs/features.rst66
-rw-r--r--lib/spack/docs/getting_started.rst1124
-rw-r--r--lib/spack/docs/index.rst32
-rw-r--r--lib/spack/docs/mirrors.rst151
-rw-r--r--lib/spack/docs/module_file_support.rst682
-rw-r--r--lib/spack/docs/packaging_guide.rst2023
-rw-r--r--lib/spack/docs/repositories.rst456
-rw-r--r--lib/spack/docs/site_configuration.rst173
-rw-r--r--lib/spack/docs/tutorial/examples/0.package.py63
-rw-r--r--lib/spack/docs/tutorial/examples/1.package.py48
-rw-r--r--lib/spack/docs/tutorial/examples/2.package.py45
-rw-r--r--lib/spack/docs/tutorial/examples/3.package.py43
-rw-r--r--lib/spack/docs/tutorial/examples/4.package.py50
-rw-r--r--lib/spack/docs/tutorial/sc16-tutorial-slide-preview.png  bin 0 -> 71641 bytes
-rw-r--r--lib/spack/docs/tutorial_sc16.rst48
-rw-r--r--lib/spack/docs/tutorial_sc16_modules.rst982
-rw-r--r--lib/spack/docs/tutorial_sc16_packaging.rst462
-rw-r--r--lib/spack/docs/tutorial_sc16_spack_basics.rst1255
-rw-r--r--lib/spack/docs/workflows.rst1198
-rwxr-xr-xlib/spack/env/cc164
l---------lib/spack/env/clang/gfortran1
l---------lib/spack/env/cray/CC1
l---------lib/spack/env/cray/cc1
l---------lib/spack/env/cray/ftn1
l---------lib/spack/env/craype/CC1
l---------lib/spack/env/craype/cc1
l---------lib/spack/env/craype/ftn1
-rw-r--r--lib/spack/external/__init__.py29
-rw-r--r--lib/spack/external/_pytest/AUTHORS141
-rw-r--r--lib/spack/external/_pytest/LICENSE21
-rw-r--r--lib/spack/external/_pytest/README.rst102
-rw-r--r--lib/spack/external/_pytest/__init__.py2
-rw-r--r--lib/spack/external/_pytest/_argcomplete.py102
-rw-r--r--lib/spack/external/_pytest/_code/__init__.py9
-rw-r--r--lib/spack/external/_pytest/_code/_py2traceback.py81
-rw-r--r--lib/spack/external/_pytest/_code/code.py861
-rw-r--r--lib/spack/external/_pytest/_code/source.py414
-rw-r--r--lib/spack/external/_pytest/_pluggy.py11
-rw-r--r--lib/spack/external/_pytest/assertion/__init__.py164
-rw-r--r--lib/spack/external/_pytest/assertion/rewrite.py945
-rw-r--r--lib/spack/external/_pytest/assertion/util.py300
-rw-r--r--lib/spack/external/_pytest/cacheprovider.py245
-rw-r--r--lib/spack/external/_pytest/capture.py491
-rw-r--r--lib/spack/external/_pytest/compat.py230
-rw-r--r--lib/spack/external/_pytest/config.py1340
-rw-r--r--lib/spack/external/_pytest/debugging.py124
-rw-r--r--lib/spack/external/_pytest/deprecated.py24
-rw-r--r--lib/spack/external/_pytest/doctest.py331
-rw-r--r--lib/spack/external/_pytest/fixtures.py1134
-rw-r--r--lib/spack/external/_pytest/freeze_support.py45
-rw-r--r--lib/spack/external/_pytest/helpconfig.py144
-rw-r--r--lib/spack/external/_pytest/hookspec.py314
-rw-r--r--lib/spack/external/_pytest/junitxml.py413
-rw-r--r--lib/spack/external/_pytest/main.py762
-rw-r--r--lib/spack/external/_pytest/mark.py328
-rw-r--r--lib/spack/external/_pytest/monkeypatch.py258
-rw-r--r--lib/spack/external/_pytest/nose.py71
-rw-r--r--lib/spack/external/_pytest/pastebin.py98
-rw-r--r--lib/spack/external/_pytest/pytester.py1139
-rw-r--r--lib/spack/external/_pytest/python.py1578
-rw-r--r--lib/spack/external/_pytest/recwarn.py226
-rw-r--r--lib/spack/external/_pytest/resultlog.py107
-rw-r--r--lib/spack/external/_pytest/runner.py578
-rw-r--r--lib/spack/external/_pytest/setuponly.py72
-rw-r--r--lib/spack/external/_pytest/setupplan.py23
-rw-r--r--lib/spack/external/_pytest/skipping.py375
-rw-r--r--lib/spack/external/_pytest/terminal.py593
-rw-r--r--lib/spack/external/_pytest/tmpdir.py124
-rw-r--r--lib/spack/external/_pytest/unittest.py217
-rw-r--r--lib/spack/external/_pytest/vendored_packages/README.md13
-rw-r--r--lib/spack/external/_pytest/vendored_packages/__init__.py0
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst11
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER1
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt22
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA40
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD9
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL6
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json1
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt1
-rw-r--r--lib/spack/external/_pytest/vendored_packages/pluggy.py802
-rw-r--r--lib/spack/external/distro.py1081
-rw-r--r--lib/spack/external/nose/LICENSE502
-rw-r--r--lib/spack/external/nose/__init__.py15
-rw-r--r--lib/spack/external/nose/__main__.py8
-rw-r--r--lib/spack/external/nose/case.py397
-rw-r--r--lib/spack/external/nose/commands.py172
-rw-r--r--lib/spack/external/nose/config.py661
-rw-r--r--lib/spack/external/nose/core.py341
-rw-r--r--lib/spack/external/nose/exc.py9
-rw-r--r--lib/spack/external/nose/ext/__init__.py3
-rw-r--r--lib/spack/external/nose/ext/dtcompat.py2272
-rw-r--r--lib/spack/external/nose/failure.py42
-rw-r--r--lib/spack/external/nose/importer.py167
-rw-r--r--lib/spack/external/nose/inspector.py207
-rw-r--r--lib/spack/external/nose/loader.py623
-rw-r--r--lib/spack/external/nose/plugins/__init__.py190
-rw-r--r--lib/spack/external/nose/plugins/allmodules.py45
-rw-r--r--lib/spack/external/nose/plugins/attrib.py286
-rw-r--r--lib/spack/external/nose/plugins/base.py725
-rw-r--r--lib/spack/external/nose/plugins/builtin.py34
-rw-r--r--lib/spack/external/nose/plugins/capture.py115
-rw-r--r--lib/spack/external/nose/plugins/collect.py94
-rw-r--r--lib/spack/external/nose/plugins/cover.py271
-rw-r--r--lib/spack/external/nose/plugins/debug.py67
-rw-r--r--lib/spack/external/nose/plugins/deprecated.py45
-rw-r--r--lib/spack/external/nose/plugins/doctests.py455
-rw-r--r--lib/spack/external/nose/plugins/errorclass.py210
-rw-r--r--lib/spack/external/nose/plugins/failuredetail.py49
-rw-r--r--lib/spack/external/nose/plugins/isolate.py103
-rw-r--r--lib/spack/external/nose/plugins/logcapture.py245
-rw-r--r--lib/spack/external/nose/plugins/manager.py460
-rw-r--r--lib/spack/external/nose/plugins/multiprocess.py835
-rw-r--r--lib/spack/external/nose/plugins/plugintest.py416
-rw-r--r--lib/spack/external/nose/plugins/prof.py154
-rw-r--r--lib/spack/external/nose/plugins/skip.py63
-rw-r--r--lib/spack/external/nose/plugins/testid.py311
-rw-r--r--lib/spack/external/nose/plugins/xunit.py341
-rw-r--r--lib/spack/external/nose/proxy.py188
-rw-r--r--lib/spack/external/nose/pyversion.py215
-rw-r--r--lib/spack/external/nose/result.py200
-rw-r--r--lib/spack/external/nose/selector.py251
-rw-r--r--lib/spack/external/nose/sphinx/__init__.py1
-rw-r--r--lib/spack/external/nose/sphinx/pluginopts.py189
-rw-r--r--lib/spack/external/nose/suite.py609
-rw-r--r--lib/spack/external/nose/tools/__init__.py15
-rw-r--r--lib/spack/external/nose/tools/nontrivial.py151
-rw-r--r--lib/spack/external/nose/tools/trivial.py54
-rw-r--r--lib/spack/external/nose/twistedtools.py173
-rw-r--r--lib/spack/external/nose/usage.txt115
-rw-r--r--lib/spack/external/nose/util.py668
-rw-r--r--lib/spack/external/py/AUTHORS24
-rw-r--r--lib/spack/external/py/LICENSE19
-rw-r--r--lib/spack/external/py/README.rst21
-rw-r--r--lib/spack/external/py/__init__.py150
-rw-r--r--lib/spack/external/py/__metainfo.py2
-rw-r--r--lib/spack/external/py/_apipkg.py181
-rw-r--r--lib/spack/external/py/_builtin.py248
-rw-r--r--lib/spack/external/py/_code/__init__.py1
-rw-r--r--lib/spack/external/py/_code/_assertionnew.py339
-rw-r--r--lib/spack/external/py/_code/_assertionold.py555
-rw-r--r--lib/spack/external/py/_code/_py2traceback.py79
-rw-r--r--lib/spack/external/py/_code/assertion.py94
-rw-r--r--lib/spack/external/py/_code/code.py787
-rw-r--r--lib/spack/external/py/_code/source.py411
-rw-r--r--lib/spack/external/py/_error.py89
-rw-r--r--lib/spack/external/py/_iniconfig.py162
-rw-r--r--lib/spack/external/py/_io/__init__.py1
-rw-r--r--lib/spack/external/py/_io/capture.py371
-rw-r--r--lib/spack/external/py/_io/saferepr.py71
-rw-r--r--lib/spack/external/py/_io/terminalwriter.py357
-rw-r--r--lib/spack/external/py/_log/__init__.py2
-rw-r--r--lib/spack/external/py/_log/log.py186
-rw-r--r--lib/spack/external/py/_log/warning.py76
-rw-r--r--lib/spack/external/py/_path/__init__.py1
-rw-r--r--lib/spack/external/py/_path/cacheutil.py114
-rw-r--r--lib/spack/external/py/_path/common.py439
-rw-r--r--lib/spack/external/py/_path/local.py928
-rw-r--r--lib/spack/external/py/_path/svnurl.py380
-rw-r--r--lib/spack/external/py/_path/svnwc.py1240
-rw-r--r--lib/spack/external/py/_process/__init__.py1
-rw-r--r--lib/spack/external/py/_process/cmdexec.py49
-rw-r--r--lib/spack/external/py/_process/forkedfunc.py120
-rw-r--r--lib/spack/external/py/_process/killproc.py23
-rw-r--r--lib/spack/external/py/_std.py18
-rw-r--r--lib/spack/external/py/_xmlgen.py255
-rw-r--r--lib/spack/external/py/test.py10
-rwxr-xr-xlib/spack/external/pyqver2.py6
-rw-r--r--lib/spack/external/pytest.py28
-rw-r--r--lib/spack/llnl/util/filesystem.py336
-rw-r--r--lib/spack/llnl/util/lang.py85
-rw-r--r--lib/spack/llnl/util/link_tree.py10
-rw-r--r--lib/spack/llnl/util/lock.py210
-rw-r--r--lib/spack/llnl/util/tty/__init__.py59
-rw-r--r--lib/spack/llnl/util/tty/colify.py41
-rw-r--r--lib/spack/llnl/util/tty/color.py29
-rw-r--r--lib/spack/llnl/util/tty/log.py245
-rw-r--r--lib/spack/spack/__init__.py204
-rw-r--r--lib/spack/spack/abi.py29
-rw-r--r--lib/spack/spack/architecture.py513
-rw-r--r--lib/spack/spack/build_environment.py523
-rw-r--r--lib/spack/spack/build_systems/__init__.py24
-rw-r--r--lib/spack/spack/build_systems/autotools.py203
-rw-r--r--lib/spack/spack/build_systems/cmake.py152
-rw-r--r--lib/spack/spack/build_systems/makefile.py72
-rw-r--r--lib/spack/spack/build_systems/python.py309
-rw-r--r--lib/spack/spack/build_systems/r.py58
-rw-r--r--lib/spack/spack/cmd/__init__.py149
-rw-r--r--lib/spack/spack/cmd/activate.py4
-rw-r--r--lib/spack/spack/cmd/arch.py20
-rw-r--r--lib/spack/spack/cmd/bootstrap.py51
-rw-r--r--lib/spack/spack/cmd/build.py43
-rw-r--r--lib/spack/spack/cmd/cd.py3
-rw-r--r--lib/spack/spack/cmd/checksum.py145
-rw-r--r--lib/spack/spack/cmd/clean.py1
-rw-r--r--lib/spack/spack/cmd/common/__init__.py24
-rw-r--r--lib/spack/spack/cmd/common/arguments.py107
-rw-r--r--lib/spack/spack/cmd/compiler.py111
-rw-r--r--lib/spack/spack/cmd/compilers.py6
-rw-r--r--lib/spack/spack/cmd/config.py31
-rw-r--r--lib/spack/spack/cmd/configure.py90
-rw-r--r--lib/spack/spack/cmd/create.py624
-rw-r--r--lib/spack/spack/cmd/deactivate.py14
-rw-r--r--lib/spack/spack/cmd/debug.py100
-rw-r--r--lib/spack/spack/cmd/dependents.py17
-rw-r--r--lib/spack/spack/cmd/diy.py74
-rw-r--r--lib/spack/spack/cmd/doc.py1
-rw-r--r--lib/spack/spack/cmd/edit.py80
-rw-r--r--lib/spack/spack/cmd/env.py8
-rw-r--r--lib/spack/spack/cmd/extensions.py15
-rw-r--r--lib/spack/spack/cmd/fetch.py14
-rw-r--r--lib/spack/spack/cmd/find.py209
-rw-r--r--lib/spack/spack/cmd/flake8.py218
-rw-r--r--lib/spack/spack/cmd/graph.py56
-rw-r--r--lib/spack/spack/cmd/help.py4
-rw-r--r--lib/spack/spack/cmd/info.py44
-rw-r--r--lib/spack/spack/cmd/install.py304
-rw-r--r--lib/spack/spack/cmd/list.py168
-rw-r--r--lib/spack/spack/cmd/load.py7
-rw-r--r--lib/spack/spack/cmd/location.py34
-rw-r--r--lib/spack/spack/cmd/md5.py20
-rw-r--r--lib/spack/spack/cmd/mirror.py27
-rw-r--r--lib/spack/spack/cmd/module.py266
-rw-r--r--lib/spack/spack/cmd/package-list.py95
-rw-r--r--lib/spack/spack/cmd/patch.py6
-rw-r--r--lib/spack/spack/cmd/pkg.py53
-rw-r--r--lib/spack/spack/cmd/providers.py9
-rw-r--r--lib/spack/spack/cmd/purge.py32
-rw-r--r--lib/spack/spack/cmd/python.py10
-rw-r--r--lib/spack/spack/cmd/reindex.py6
-rw-r--r--lib/spack/spack/cmd/repo.py31
-rw-r--r--lib/spack/spack/cmd/restage.py1
-rw-r--r--lib/spack/spack/cmd/setup.py182
-rw-r--r--lib/spack/spack/cmd/spec.py55
-rw-r--r--lib/spack/spack/cmd/stage.py4
-rw-r--r--lib/spack/spack/cmd/test-install.py225
-rw-r--r--lib/spack/spack/cmd/test.py101
-rw-r--r--lib/spack/spack/cmd/uninstall.py149
-rw-r--r--lib/spack/spack/cmd/unload.py6
-rw-r--r--lib/spack/spack/cmd/unuse.py6
-rw-r--r--lib/spack/spack/cmd/url_parse.py (renamed from lib/spack/spack/cmd/url-parse.py)24
-rw-r--r--lib/spack/spack/cmd/urls.py5
-rw-r--r--lib/spack/spack/cmd/use.py6
-rw-r--r--lib/spack/spack/cmd/versions.py7
-rw-r--r--lib/spack/spack/cmd/view.py303
-rw-r--r--lib/spack/spack/compiler.py164
-rw-r--r--lib/spack/spack/compilers/__init__.py311
-rw-r--r--lib/spack/spack/compilers/cce.py (renamed from lib/spack/spack/test/tally_plugin.py)48
-rw-r--r--lib/spack/spack/compilers/clang.py187
-rw-r--r--lib/spack/spack/compilers/gcc.py35
-rw-r--r--lib/spack/spack/compilers/intel.py41
-rw-r--r--lib/spack/spack/compilers/nag.py28
-rw-r--r--lib/spack/spack/compilers/pgi.py24
-rw-r--r--lib/spack/spack/compilers/xl.py54
-rw-r--r--lib/spack/spack/concretize.py344
-rw-r--r--lib/spack/spack/config.py503
-rw-r--r--lib/spack/spack/database.py463
-rw-r--r--lib/spack/spack/directives.py433
-rw-r--r--lib/spack/spack/directory_layout.py116
-rw-r--r--lib/spack/spack/environment.py149
-rw-r--r--lib/spack/spack/error.py41
-rw-r--r--lib/spack/spack/fetch_strategy.py213
-rw-r--r--lib/spack/spack/file_cache.py182
-rw-r--r--lib/spack/spack/graph.py222
-rw-r--r--lib/spack/spack/hooks/__init__.py21
-rw-r--r--lib/spack/spack/hooks/case_consistency.py101
-rw-r--r--lib/spack/spack/hooks/extensions.py2
-rw-r--r--lib/spack/spack/hooks/licensing.py10
-rw-r--r--lib/spack/spack/hooks/module_file_generation.py37
-rw-r--r--lib/spack/spack/hooks/sbang.py35
-rw-r--r--lib/spack/spack/hooks/yaml_version_check.py57
-rw-r--r--lib/spack/spack/mirror.py98
-rw-r--r--lib/spack/spack/modules.py442
-rw-r--r--lib/spack/spack/multimethod.py33
-rw-r--r--lib/spack/spack/operating_systems/__init__.py24
-rw-r--r--lib/spack/spack/operating_systems/cnk.py41
-rw-r--r--lib/spack/spack/operating_systems/cnl.py82
-rw-r--r--lib/spack/spack/operating_systems/linux_distro.py53
-rw-r--r--lib/spack/spack/operating_systems/mac_os.py54
-rw-r--r--lib/spack/spack/package.py850
-rw-r--r--lib/spack/spack/package_prefs.py314
-rw-r--r--lib/spack/spack/parse.py72
-rw-r--r--lib/spack/spack/patch.py116
-rw-r--r--lib/spack/spack/platforms/__init__.py24
-rw-r--r--lib/spack/spack/platforms/bgq.py57
-rw-r--r--lib/spack/spack/platforms/cray.py151
-rw-r--r--lib/spack/spack/platforms/darwin.py49
-rw-r--r--lib/spack/spack/platforms/linux.py53
-rw-r--r--lib/spack/spack/platforms/test.py49
-rw-r--r--lib/spack/spack/preferred_packages.py175
-rw-r--r--lib/spack/spack/provider_index.py302
-rw-r--r--lib/spack/spack/repository.py365
-rw-r--r--lib/spack/spack/resource.py6
-rw-r--r--lib/spack/spack/schema/__init__.py33
-rw-r--r--lib/spack/spack/schema/compilers.py108
-rw-r--r--lib/spack/spack/schema/config.py67
-rw-r--r--lib/spack/spack/schema/mirrors.py48
-rw-r--r--lib/spack/spack/schema/modules.py175
-rw-r--r--lib/spack/spack/schema/packages.py90
-rw-r--r--lib/spack/spack/schema/repos.py45
-rw-r--r--lib/spack/spack/spec.py2027
-rw-r--r--lib/spack/spack/stage.py329
-rw-r--r--lib/spack/spack/store.py75
-rw-r--r--lib/spack/spack/test/__init__.py64
-rw-r--r--lib/spack/spack/test/architecture.py161
-rw-r--r--lib/spack/spack/test/build_system_guess.py65
-rw-r--r--lib/spack/spack/test/build_systems.py42
-rw-r--r--lib/spack/spack/test/cc.py168
-rw-r--r--lib/spack/spack/test/cmd/find.py53
-rw-r--r--lib/spack/spack/test/cmd/install.py (renamed from lib/spack/spack/test/cmd/test_install.py)127
-rw-r--r--lib/spack/spack/test/cmd/module.py106
-rw-r--r--lib/spack/spack/test/cmd/test_compiler_cmd.py94
-rw-r--r--lib/spack/spack/test/cmd/uninstall.py49
-rw-r--r--lib/spack/spack/test/concretize.py395
-rw-r--r--lib/spack/spack/test/concretize_preferences.py173
-rw-r--r--lib/spack/spack/test/config.py404
-rw-r--r--lib/spack/spack/test/configure_guess.py83
-rw-r--r--lib/spack/spack/test/conftest.py514
-rw-r--r--lib/spack/spack/test/data/compilers.yaml116
-rw-r--r--lib/spack/spack/test/data/config.yaml11
-rw-r--r--lib/spack/spack/test/data/packages.yaml14
-rw-r--r--lib/spack/spack/test/data/sourceme_first.sh (renamed from lib/spack/spack/hooks/dotkit.py)14
-rw-r--r--lib/spack/spack/test/data/sourceme_parameters.sh (renamed from lib/spack/spack/hooks/tclmodule.py)17
-rw-r--r--lib/spack/spack/test/data/sourceme_second.sh29
-rw-r--r--lib/spack/spack/test/database.py390
-rw-r--r--lib/spack/spack/test/directory_layout.py308
-rw-r--r--lib/spack/spack/test/environment.py116
-rw-r--r--lib/spack/spack/test/file_cache.py83
-rw-r--r--lib/spack/spack/test/git_fetch.py141
-rw-r--r--lib/spack/spack/test/hg_fetch.py125
-rw-r--r--lib/spack/spack/test/install.py154
-rw-r--r--lib/spack/spack/test/library_list.py111
-rw-r--r--lib/spack/spack/test/link_tree.py6
-rw-r--r--lib/spack/spack/test/lock.py493
-rw-r--r--lib/spack/spack/test/make_executable.py26
-rw-r--r--lib/spack/spack/test/mirror.py230
-rw-r--r--lib/spack/spack/test/mock_database.py104
-rw-r--r--lib/spack/spack/test/mock_packages_test.py133
-rw-r--r--lib/spack/spack/test/mock_repo.py198
-rw-r--r--lib/spack/spack/test/modules.py573
-rw-r--r--lib/spack/spack/test/multimethod.py132
-rw-r--r--lib/spack/spack/test/namespace_trie.py6
-rw-r--r--lib/spack/spack/test/optional_deps.py158
-rw-r--r--lib/spack/spack/test/package_sanity.py14
-rw-r--r--lib/spack/spack/test/packages.py126
-rw-r--r--lib/spack/spack/test/pattern.py2
-rw-r--r--lib/spack/spack/test/provider_index.py93
-rw-r--r--lib/spack/spack/test/python_version.py11
-rw-r--r--lib/spack/spack/test/sbang.py41
-rw-r--r--lib/spack/spack/test/spack_yaml.py (renamed from lib/spack/spack/test/yaml.py)30
-rw-r--r--lib/spack/spack/test/spec_dag.py555
-rw-r--r--lib/spack/spack/test/spec_semantics.py604
-rw-r--r--lib/spack/spack/test/spec_syntax.py339
-rw-r--r--lib/spack/spack/test/spec_yaml.py171
-rw-r--r--lib/spack/spack/test/stage.py537
-rw-r--r--lib/spack/spack/test/svn_fetch.py136
-rw-r--r--lib/spack/spack/test/url_extrapolate.py27
-rw-r--r--lib/spack/spack/test/url_parse.py61
-rw-r--r--lib/spack/spack/test/url_substitution.py30
-rw-r--r--lib/spack/spack/test/versions.py97
-rw-r--r--lib/spack/spack/url.py130
-rw-r--r--lib/spack/spack/util/compression.py13
-rw-r--r--lib/spack/spack/util/crypto.py32
-rw-r--r--lib/spack/spack/util/debug.py5
-rw-r--r--lib/spack/spack/util/environment.py19
-rw-r--r--lib/spack/spack/util/executable.py93
-rw-r--r--lib/spack/spack/util/multiproc.py18
-rw-r--r--lib/spack/spack/util/naming.py35
-rw-r--r--lib/spack/spack/util/path.py72
-rw-r--r--lib/spack/spack/util/pattern.py88
-rw-r--r--lib/spack/spack/util/prefix.py1
-rw-r--r--lib/spack/spack/util/spack_json.py76
-rw-r--r--lib/spack/spack/util/spack_yaml.py79
-rw-r--r--lib/spack/spack/util/string.py4
-rw-r--r--lib/spack/spack/util/web.py42
-rw-r--r--lib/spack/spack/variant.py4
-rw-r--r--lib/spack/spack/version.py283
-rw-r--r--lib/spack/spack/virtual.py161
-rw-r--r--pytest.ini5
-rw-r--r--share/spack/csh/spack.csh8
-rwxr-xr-xshare/spack/qa/check_dependencies96
-rwxr-xr-xshare/spack/qa/run-doc-tests41
-rwxr-xr-xshare/spack/qa/run-flake855
-rwxr-xr-xshare/spack/qa/run-flake8-tests29
-rwxr-xr-xshare/spack/qa/run-unit-tests51
-rwxr-xr-xshare/spack/setup-env.sh34
-rw-r--r--var/spack/mock_configs/site_spackconfig/compilers.yaml40
-rw-r--r--var/spack/repos/builtin.mock/packages/a/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/b/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/c/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/callpath/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/cmake-client/package.py20
-rw-r--r--var/spack/repos/builtin.mock/packages/cmake/package.py4
-rw-r--r--var/spack/repos/builtin.mock/packages/develop-test/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/direct-mpich/package.py (renamed from var/spack/repos/builtin.mock/packages/direct_mpich/package.py)1
-rw-r--r--var/spack/repos/builtin.mock/packages/dt-diamond-bottom/package.py36
-rw-r--r--var/spack/repos/builtin.mock/packages/dt-diamond-left/package.py38
-rw-r--r--var/spack/repos/builtin.mock/packages/dt-diamond-right/package.py38
-rw-r--r--var/spack/repos/builtin.mock/packages/dt-diamond/package.py39
-rw-r--r--var/spack/repos/builtin.mock/packages/dtbuild1/package.py42
-rw-r--r--var/spack/repos/builtin.mock/packages/dtbuild2/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/dtbuild3/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/dtlink1/package.py39
-rw-r--r--var/spack/repos/builtin.mock/packages/dtlink2/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/dtlink3/package.py40
-rw-r--r--var/spack/repos/builtin.mock/packages/dtlink4/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/dtlink5/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/dtrun1/package.py40
-rw-r--r--var/spack/repos/builtin.mock/packages/dtrun2/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/dtrun3/package.py39
-rw-r--r--var/spack/repos/builtin.mock/packages/dttop/package.py41
-rw-r--r--var/spack/repos/builtin.mock/packages/dtuse/package.py39
-rw-r--r--var/spack/repos/builtin.mock/packages/dyninst/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/e/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/externalmodule/package.py38
-rw-r--r--var/spack/repos/builtin.mock/packages/externalprereq/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/externaltest/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/externaltool/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/externalvirtual/package.py5
-rw-r--r--var/spack/repos/builtin.mock/packages/failing-build/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/fake/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/git-test/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/hg-test/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/hypre/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/indirect-mpich/package.py (renamed from var/spack/repos/builtin.mock/packages/indirect_mpich/package.py)3
-rw-r--r--var/spack/repos/builtin.mock/packages/libdwarf/package.py2
-rw-r--r--var/spack/repos/builtin.mock/packages/libelf/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/mpich/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/mpich2/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/mpileaks/package.py3
-rw-r--r--var/spack/repos/builtin.mock/packages/multi-provider-mpi/package.py51
-rw-r--r--var/spack/repos/builtin.mock/packages/multimethod/package.py39
-rw-r--r--var/spack/repos/builtin.mock/packages/netlib-blas/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/netlib-lapack/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/openblas/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/optional-dep-test/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/othervirtual/package.py37
-rw-r--r--var/spack/repos/builtin.mock/packages/python/package.py2
-rw-r--r--var/spack/repos/builtin.mock/packages/simple-inheritance/package.py24
-rw-r--r--var/spack/repos/builtin.mock/packages/svn-test/package.py1
-rw-r--r--var/spack/repos/builtin.mock/packages/trivial-install-test-package/package.py (renamed from var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py)1
-rw-r--r--var/spack/repos/builtin.mock/packages/zmpi/package.py1
-rw-r--r--var/spack/repos/builtin/packages/ImageMagick/package.py63
-rw-r--r--var/spack/repos/builtin/packages/R/package.py130
-rw-r--r--var/spack/repos/builtin/packages/abinit/package.py175
-rw-r--r--var/spack/repos/builtin/packages/ack/package.py51
-rw-r--r--var/spack/repos/builtin/packages/activeharmony/package.py7
-rw-r--r--var/spack/repos/builtin/packages/adept-utils/package.py2
-rw-r--r--var/spack/repos/builtin/packages/adios/adios_1100.patch29
-rw-r--r--var/spack/repos/builtin/packages/adios/package.py131
-rw-r--r--var/spack/repos/builtin/packages/adol-c/openmp_exam_261.patch (renamed from var/spack/repos/builtin/packages/adol-c/openmp_exam.patch)0
-rw-r--r--var/spack/repos/builtin/packages/adol-c/package.py63
-rw-r--r--var/spack/repos/builtin/packages/ant/package.py44
-rw-r--r--var/spack/repos/builtin/packages/antlr/package.py51
-rw-r--r--var/spack/repos/builtin/packages/ape/package.py62
-rw-r--r--var/spack/repos/builtin/packages/apex/package.py19
-rw-r--r--var/spack/repos/builtin/packages/applewmproto/package.py46
-rw-r--r--var/spack/repos/builtin/packages/appres/package.py50
-rw-r--r--var/spack/repos/builtin/packages/apr-util/package.py1
-rw-r--r--var/spack/repos/builtin/packages/apr/package.py1
-rw-r--r--var/spack/repos/builtin/packages/armadillo/package.py72
-rw-r--r--var/spack/repos/builtin/packages/arpack-ng/make_install.patch24
-rw-r--r--var/spack/repos/builtin/packages/arpack-ng/package.py90
-rw-r--r--var/spack/repos/builtin/packages/arpack-ng/parpack_cmake.patch18
-rw-r--r--var/spack/repos/builtin/packages/arpack/package.py40
-rw-r--r--var/spack/repos/builtin/packages/asciidoc/package.py6
-rw-r--r--var/spack/repos/builtin/packages/astra/package.py41
-rw-r--r--var/spack/repos/builtin/packages/astyle/package.py34
-rw-r--r--var/spack/repos/builtin/packages/atk/package.py10
-rw-r--r--var/spack/repos/builtin/packages/atlas/package.py96
-rw-r--r--var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c49
-rw-r--r--var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output12
-rw-r--r--var/spack/repos/builtin/packages/atompaw/package.py63
-rw-r--r--var/spack/repos/builtin/packages/atop/package.py3
-rw-r--r--var/spack/repos/builtin/packages/autoconf/package.py30
-rw-r--r--var/spack/repos/builtin/packages/automaded/package.py2
-rw-r--r--var/spack/repos/builtin/packages/automake/package.py22
-rw-r--r--var/spack/repos/builtin/packages/bamtools/package.py45
-rw-r--r--var/spack/repos/builtin/packages/bash-completion/package.py64
-rw-r--r--var/spack/repos/builtin/packages/bash/package.py1
-rw-r--r--var/spack/repos/builtin/packages/bazel/cc_configure.patch28
-rw-r--r--var/spack/repos/builtin/packages/bazel/fix_env_handling.patch119
-rw-r--r--var/spack/repos/builtin/packages/bazel/link.patch133
-rw-r--r--var/spack/repos/builtin/packages/bazel/package.py89
-rw-r--r--var/spack/repos/builtin/packages/bbcp/package.py13
-rw-r--r--var/spack/repos/builtin/packages/bcftools/package.py43
-rw-r--r--var/spack/repos/builtin/packages/bdftopcf/package.py50
-rw-r--r--var/spack/repos/builtin/packages/bdw-gc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/bear/package.py6
-rw-r--r--var/spack/repos/builtin/packages/bedtools2/package.py43
-rw-r--r--var/spack/repos/builtin/packages/beforelight/package.py50
-rw-r--r--var/spack/repos/builtin/packages/bertini/package.py50
-rw-r--r--var/spack/repos/builtin/packages/bib2xhtml/package.py3
-rw-r--r--var/spack/repos/builtin/packages/bigreqsproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/binutils/package.py19
-rw-r--r--var/spack/repos/builtin/packages/binutils/update_symbol-2.26.patch104
-rw-r--r--var/spack/repos/builtin/packages/bison/package.py7
-rw-r--r--var/spack/repos/builtin/packages/bitmap/package.py51
-rw-r--r--var/spack/repos/builtin/packages/bliss/Makefile.spack.patch62
-rw-r--r--var/spack/repos/builtin/packages/bliss/package.py50
-rw-r--r--var/spack/repos/builtin/packages/blitz/package.py14
-rw-r--r--var/spack/repos/builtin/packages/boost/boost_10125.patch51
-rw-r--r--var/spack/repos/builtin/packages/boost/package.py182
-rw-r--r--var/spack/repos/builtin/packages/bowtie2/package.py8
-rw-r--r--var/spack/repos/builtin/packages/boxlib/package.py29
-rw-r--r--var/spack/repos/builtin/packages/bpp-core/package.py41
-rw-r--r--var/spack/repos/builtin/packages/bpp-phyl/package.py43
-rw-r--r--var/spack/repos/builtin/packages/bpp-seq/package.py42
-rw-r--r--var/spack/repos/builtin/packages/bpp-suite/package.py46
-rw-r--r--var/spack/repos/builtin/packages/bwa/package.py52
-rw-r--r--var/spack/repos/builtin/packages/bzip2/package.py61
-rw-r--r--var/spack/repos/builtin/packages/c-blosc/package.py52
-rw-r--r--var/spack/repos/builtin/packages/cairo/package.py31
-rw-r--r--var/spack/repos/builtin/packages/caliper/package.py17
-rw-r--r--var/spack/repos/builtin/packages/callpath/package.py2
-rw-r--r--var/spack/repos/builtin/packages/cantera/package.py202
-rw-r--r--var/spack/repos/builtin/packages/cask/package.py55
-rw-r--r--var/spack/repos/builtin/packages/catch/package.py40
-rw-r--r--var/spack/repos/builtin/packages/cblas/package.py11
-rw-r--r--var/spack/repos/builtin/packages/cbtf-argonavis/package.py121
-rw-r--r--var/spack/repos/builtin/packages/cbtf-krell/package.py247
-rw-r--r--var/spack/repos/builtin/packages/cbtf-lanl/package.py93
-rw-r--r--var/spack/repos/builtin/packages/cbtf/package.py151
-rw-r--r--var/spack/repos/builtin/packages/ccache/package.py44
-rw-r--r--var/spack/repos/builtin/packages/cdd/Makefile.spack.patch22
-rw-r--r--var/spack/repos/builtin/packages/cdd/package.py52
-rw-r--r--var/spack/repos/builtin/packages/cddlib/package.py58
-rw-r--r--var/spack/repos/builtin/packages/cdo/package.py117
-rw-r--r--var/spack/repos/builtin/packages/cereal/package.py14
-rw-r--r--var/spack/repos/builtin/packages/cfitsio/package.py10
-rw-r--r--var/spack/repos/builtin/packages/cgal/package.py94
-rw-r--r--var/spack/repos/builtin/packages/cgm/package.py4
-rw-r--r--var/spack/repos/builtin/packages/cgns/package.py73
-rw-r--r--var/spack/repos/builtin/packages/charm/mpi.patch19
-rw-r--r--var/spack/repos/builtin/packages/charm/package.py190
-rw-r--r--var/spack/repos/builtin/packages/charm/strictpass.patch16
-rw-r--r--var/spack/repos/builtin/packages/cityhash/package.py10
-rw-r--r--var/spack/repos/builtin/packages/cleverleaf/package.py18
-rw-r--r--var/spack/repos/builtin/packages/clhep/darwin/CLHEP.patch11
-rw-r--r--var/spack/repos/builtin/packages/clhep/package.py81
-rw-r--r--var/spack/repos/builtin/packages/cloog/package.py1
-rw-r--r--var/spack/repos/builtin/packages/cmake/intel-c-gnu11.patch23
-rw-r--r--var/spack/repos/builtin/packages/cmake/package.py71
-rw-r--r--var/spack/repos/builtin/packages/cmocka/package.py9
-rw-r--r--var/spack/repos/builtin/packages/cmor/package.py72
-rw-r--r--var/spack/repos/builtin/packages/cnmem/package.py7
-rw-r--r--var/spack/repos/builtin/packages/compiz/package.py63
-rw-r--r--var/spack/repos/builtin/packages/compositeproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/conduit/package.py313
-rw-r--r--var/spack/repos/builtin/packages/constype/package.py47
-rw-r--r--var/spack/repos/builtin/packages/converge/package.py69
-rw-r--r--var/spack/repos/builtin/packages/coreutils/package.py1
-rw-r--r--var/spack/repos/builtin/packages/cp2k/package.py208
-rw-r--r--var/spack/repos/builtin/packages/cppad/package.py48
-rw-r--r--var/spack/repos/builtin/packages/cppcheck/package.py1
-rw-r--r--var/spack/repos/builtin/packages/cppunit/package.py34
-rw-r--r--var/spack/repos/builtin/packages/cram/package.py2
-rw-r--r--var/spack/repos/builtin/packages/cryptopp/package.py20
-rw-r--r--var/spack/repos/builtin/packages/cscope/package.py12
-rw-r--r--var/spack/repos/builtin/packages/cube/package.py24
-rw-r--r--var/spack/repos/builtin/packages/cuda/package.py40
-rw-r--r--var/spack/repos/builtin/packages/curl/package.py5
-rw-r--r--var/spack/repos/builtin/packages/czmq/package.py28
-rw-r--r--var/spack/repos/builtin/packages/daal/package.py52
-rw-r--r--var/spack/repos/builtin/packages/dakota/package.py39
-rw-r--r--var/spack/repos/builtin/packages/damageproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/damselfly/package.py12
-rw-r--r--var/spack/repos/builtin/packages/darshan-runtime/package.py75
-rw-r--r--var/spack/repos/builtin/packages/darshan-util/package.py51
-rw-r--r--var/spack/repos/builtin/packages/datamash/package.py44
-rw-r--r--var/spack/repos/builtin/packages/dbus/package.py1
-rw-r--r--var/spack/repos/builtin/packages/dealii/package.py315
-rw-r--r--var/spack/repos/builtin/packages/dia/package.py5
-rw-r--r--var/spack/repos/builtin/packages/dmxproto/package.py46
-rw-r--r--var/spack/repos/builtin/packages/docbook-xml/package.py18
-rw-r--r--var/spack/repos/builtin/packages/docbook-xsl/package.py53
-rw-r--r--var/spack/repos/builtin/packages/doxygen/package.py12
-rw-r--r--var/spack/repos/builtin/packages/dri2proto/package.py20
-rw-r--r--var/spack/repos/builtin/packages/dri3proto/package.py46
-rw-r--r--var/spack/repos/builtin/packages/dtcmp/package.py2
-rw-r--r--var/spack/repos/builtin/packages/dyninst/package.py52
-rw-r--r--var/spack/repos/builtin/packages/dyninst/stackanalysis_h.patch11
-rw-r--r--var/spack/repos/builtin/packages/dyninst/stat_dysect.patch96
-rw-r--r--var/spack/repos/builtin/packages/editres/package.py48
-rw-r--r--var/spack/repos/builtin/packages/eigen/package.py28
-rw-r--r--var/spack/repos/builtin/packages/elfutils/package.py2
-rw-r--r--var/spack/repos/builtin/packages/elk/package.py6
-rw-r--r--var/spack/repos/builtin/packages/elpa/package.py30
-rw-r--r--var/spack/repos/builtin/packages/emacs/package.py36
-rw-r--r--var/spack/repos/builtin/packages/encodings/package.py50
-rw-r--r--var/spack/repos/builtin/packages/environment-modules/package.py38
-rw-r--r--var/spack/repos/builtin/packages/espresso/package.py31
-rw-r--r--var/spack/repos/builtin/packages/espressopp/package.py82
-rw-r--r--var/spack/repos/builtin/packages/etsf-io/package.py67
-rw-r--r--var/spack/repos/builtin/packages/everytrace-example/package.py42
-rw-r--r--var/spack/repos/builtin/packages/everytrace/package.py51
-rw-r--r--var/spack/repos/builtin/packages/evieext/package.py45
-rw-r--r--var/spack/repos/builtin/packages/exodusii/cmake-exodus.patch9
-rw-r--r--var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch12
-rw-r--r--var/spack/repos/builtin/packages/exodusii/package.py71
-rw-r--r--var/spack/repos/builtin/packages/exonerate/package.py45
-rw-r--r--var/spack/repos/builtin/packages/expat/package.py19
-rw-r--r--var/spack/repos/builtin/packages/extrae/package.py48
-rw-r--r--var/spack/repos/builtin/packages/exuberant-ctags/package.py1
-rw-r--r--var/spack/repos/builtin/packages/fastqc/fastqc.patch30
-rw-r--r--var/spack/repos/builtin/packages/fastqc/package.py60
-rw-r--r--var/spack/repos/builtin/packages/fastx-toolkit/package.py43
-rw-r--r--var/spack/repos/builtin/packages/fenics/hdf5~cxx-detection.patch11
-rw-r--r--var/spack/repos/builtin/packages/fenics/package.py198
-rw-r--r--var/spack/repos/builtin/packages/fenics/petsc-3.7.patch394
-rw-r--r--var/spack/repos/builtin/packages/fenics/petsc-version-detection.patch39
-rw-r--r--var/spack/repos/builtin/packages/ferret/package.py103
-rw-r--r--var/spack/repos/builtin/packages/fftw/package.py71
-rw-r--r--var/spack/repos/builtin/packages/fftw/pfft-3.3.4.patch865
-rw-r--r--var/spack/repos/builtin/packages/fftw/pfft-3.3.5.patch858
-rw-r--r--var/spack/repos/builtin/packages/fish/package.py4
-rw-r--r--var/spack/repos/builtin/packages/fixesproto/package.py46
-rw-r--r--var/spack/repos/builtin/packages/flex/package.py47
-rw-r--r--var/spack/repos/builtin/packages/flint/package.py66
-rw-r--r--var/spack/repos/builtin/packages/fltk/package.py18
-rw-r--r--var/spack/repos/builtin/packages/flux/package.py38
-rw-r--r--var/spack/repos/builtin/packages/foam-extend/package.py289
-rw-r--r--var/spack/repos/builtin/packages/font-adobe-100dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-adobe-75dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-adobe-utopia-100dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-adobe-utopia-75dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-adobe-utopia-type1/package.py51
-rw-r--r--var/spack/repos/builtin/packages/font-alias/package.py49
-rw-r--r--var/spack/repos/builtin/packages/font-arabic-misc/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-bh-100dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-bh-75dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-bh-lucidatypewriter-100dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-bh-lucidatypewriter-75dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-bh-ttf/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-bh-type1/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-bitstream-100dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-bitstream-75dpi/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-bitstream-speedo/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-bitstream-type1/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-cronyx-cyrillic/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-cursor-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-daewoo-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-dec-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-ibm-type1/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-isas-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-jis-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-micro-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-misc-cyrillic/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-misc-ethiopic/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-misc-meltho/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-misc-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-mutt-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-schumacher-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-screen-cyrillic/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-sony-misc/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-sun-misc/package.py52
-rw-r--r--var/spack/repos/builtin/packages/font-util/package.py43
-rw-r--r--var/spack/repos/builtin/packages/font-winitzki-cyrillic/package.py53
-rw-r--r--var/spack/repos/builtin/packages/font-xfree86-type1/package.py52
-rw-r--r--var/spack/repos/builtin/packages/fontcacheproto/package.py39
-rw-r--r--var/spack/repos/builtin/packages/fontconfig/package.py19
-rw-r--r--var/spack/repos/builtin/packages/fontsproto/package.py42
-rw-r--r--var/spack/repos/builtin/packages/fonttosfnt/package.py47
-rw-r--r--var/spack/repos/builtin/packages/freetype/package.py25
-rw-r--r--var/spack/repos/builtin/packages/fslsfonts/package.py46
-rw-r--r--var/spack/repos/builtin/packages/fstobdf/package.py50
-rw-r--r--var/spack/repos/builtin/packages/gasnet/package.py29
-rw-r--r--var/spack/repos/builtin/packages/gawk/package.py45
-rw-r--r--var/spack/repos/builtin/packages/gbenchmark/package.py59
-rw-r--r--var/spack/repos/builtin/packages/gcc/gcc-backport.patch138
-rw-r--r--var/spack/repos/builtin/packages/gcc/package.py56
-rw-r--r--var/spack/repos/builtin/packages/gcc/piclibs.patch62
-rw-r--r--var/spack/repos/builtin/packages/gccmakedep/package.py42
-rw-r--r--var/spack/repos/builtin/packages/gconf/package.py51
-rw-r--r--var/spack/repos/builtin/packages/gdal/package.py13
-rw-r--r--var/spack/repos/builtin/packages/gdb/package.py21
-rw-r--r--var/spack/repos/builtin/packages/gdk-pixbuf/package.py3
-rw-r--r--var/spack/repos/builtin/packages/geant4/package.py85
-rw-r--r--var/spack/repos/builtin/packages/geos/package.py2
-rw-r--r--var/spack/repos/builtin/packages/gettext/package.py85
-rw-r--r--var/spack/repos/builtin/packages/gflags/package.py4
-rw-r--r--var/spack/repos/builtin/packages/ghostscript-fonts/package.py43
-rw-r--r--var/spack/repos/builtin/packages/ghostscript/package.py14
-rw-r--r--var/spack/repos/builtin/packages/giflib/package.py41
-rw-r--r--var/spack/repos/builtin/packages/git-lfs/package.py53
-rw-r--r--var/spack/repos/builtin/packages/git/package.py53
-rw-r--r--var/spack/repos/builtin/packages/gl2ps/package.py1
-rw-r--r--var/spack/repos/builtin/packages/glew/package.py51
-rw-r--r--var/spack/repos/builtin/packages/glib/g_date_strftime.patch34
-rw-r--r--var/spack/repos/builtin/packages/glib/no-Werror=format-security.patch16
-rw-r--r--var/spack/repos/builtin/packages/glib/package.py39
-rw-r--r--var/spack/repos/builtin/packages/glm/package.py11
-rw-r--r--var/spack/repos/builtin/packages/global/package.py2
-rw-r--r--var/spack/repos/builtin/packages/globus-toolkit/package.py40
-rw-r--r--var/spack/repos/builtin/packages/glog/package.py2
-rw-r--r--var/spack/repos/builtin/packages/glpk/package.py13
-rw-r--r--var/spack/repos/builtin/packages/glproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/gmake/package.py42
-rw-r--r--var/spack/repos/builtin/packages/gmp/package.py30
-rw-r--r--var/spack/repos/builtin/packages/gmsh/package.py105
-rw-r--r--var/spack/repos/builtin/packages/gnu-prolog/package.py42
-rw-r--r--var/spack/repos/builtin/packages/gnuplot/package.py17
-rw-r--r--var/spack/repos/builtin/packages/gnutls/package.py1
-rw-r--r--var/spack/repos/builtin/packages/go-bootstrap/package.py67
-rw-r--r--var/spack/repos/builtin/packages/go/misc-cgo-testcshared.patch11
-rw-r--r--var/spack/repos/builtin/packages/go/package.py87
-rw-r--r--var/spack/repos/builtin/packages/go/time_test.patch18
-rw-r--r--var/spack/repos/builtin/packages/gobject-introspection/package.py51
-rw-r--r--var/spack/repos/builtin/packages/googletest/package.py10
-rw-r--r--var/spack/repos/builtin/packages/gource/package.py63
-rw-r--r--var/spack/repos/builtin/packages/gperf/package.py7
-rw-r--r--var/spack/repos/builtin/packages/gperftools/package.py14
-rw-r--r--var/spack/repos/builtin/packages/grackle/Make.mach.template71
-rw-r--r--var/spack/repos/builtin/packages/grackle/package.py89
-rw-r--r--var/spack/repos/builtin/packages/grandr/package.py45
-rw-r--r--var/spack/repos/builtin/packages/graphlib/package.py8
-rw-r--r--var/spack/repos/builtin/packages/graphviz/package.py25
-rw-r--r--var/spack/repos/builtin/packages/grib-api/package.py80
-rw-r--r--var/spack/repos/builtin/packages/gromacs/package.py55
-rw-r--r--var/spack/repos/builtin/packages/gsl/package.py29
-rw-r--r--var/spack/repos/builtin/packages/gtkplus/package.py14
-rw-r--r--var/spack/repos/builtin/packages/gts/package.py53
-rw-r--r--var/spack/repos/builtin/packages/guile/package.py68
-rw-r--r--var/spack/repos/builtin/packages/h5hut/package.py75
-rw-r--r--var/spack/repos/builtin/packages/hadoop/package.py52
-rw-r--r--var/spack/repos/builtin/packages/harfbuzz/package.py6
-rw-r--r--var/spack/repos/builtin/packages/harminv/package.py54
-rw-r--r--var/spack/repos/builtin/packages/hdf/package.py36
-rw-r--r--var/spack/repos/builtin/packages/hdf5-blosc/package.py212
-rw-r--r--var/spack/repos/builtin/packages/hdf5/package.py140
-rw-r--r--var/spack/repos/builtin/packages/help2man/package.py37
-rw-r--r--var/spack/repos/builtin/packages/hepmc/package.py55
-rw-r--r--var/spack/repos/builtin/packages/heppdt/package.py48
-rw-r--r--var/spack/repos/builtin/packages/hmmer/package.py76
-rw-r--r--var/spack/repos/builtin/packages/hoomd-blue/package.py13
-rw-r--r--var/spack/repos/builtin/packages/hpctoolkit-externals/package.py49
-rw-r--r--var/spack/repos/builtin/packages/hpctoolkit/package.py65
-rw-r--r--var/spack/repos/builtin/packages/hpl/package.py119
-rw-r--r--var/spack/repos/builtin/packages/hpx5/package.py6
-rw-r--r--var/spack/repos/builtin/packages/hsakmt/package.py41
-rw-r--r--var/spack/repos/builtin/packages/htop/package.py41
-rw-r--r--var/spack/repos/builtin/packages/htslib/package.py41
-rw-r--r--var/spack/repos/builtin/packages/hub/package.py24
-rw-r--r--var/spack/repos/builtin/packages/hwloc/package.py2
-rw-r--r--var/spack/repos/builtin/packages/hydra/package.py2
-rw-r--r--var/spack/repos/builtin/packages/hypre/package.py50
-rw-r--r--var/spack/repos/builtin/packages/ibmisc/package.py78
-rw-r--r--var/spack/repos/builtin/packages/iceauth/package.py48
-rw-r--r--var/spack/repos/builtin/packages/icet/package.py46
-rw-r--r--var/spack/repos/builtin/packages/ico/package.py49
-rw-r--r--var/spack/repos/builtin/packages/icu4c/package.py25
-rw-r--r--var/spack/repos/builtin/packages/ilmbase/package.py42
-rw-r--r--var/spack/repos/builtin/packages/image-magick/package.py57
-rw-r--r--var/spack/repos/builtin/packages/imake/package.py43
-rw-r--r--var/spack/repos/builtin/packages/inputproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/intel-gpu-tools/package.py67
-rw-r--r--var/spack/repos/builtin/packages/intel-mkl/package.py99
-rw-r--r--var/spack/repos/builtin/packages/intel-parallel-studio/package.py362
-rw-r--r--var/spack/repos/builtin/packages/intel/package.py176
-rw-r--r--var/spack/repos/builtin/packages/intltool/package.py8
-rw-r--r--var/spack/repos/builtin/packages/ior/package.py2
-rw-r--r--var/spack/repos/builtin/packages/ipopt/package.py18
-rw-r--r--var/spack/repos/builtin/packages/ipp/package.py50
-rw-r--r--var/spack/repos/builtin/packages/isl/package.py2
-rw-r--r--var/spack/repos/builtin/packages/itstool/package.py44
-rw-r--r--var/spack/repos/builtin/packages/jansson/package.py35
-rw-r--r--var/spack/repos/builtin/packages/jasper/fix_alpha_channel_assert_fail.patch25
-rw-r--r--var/spack/repos/builtin/packages/jasper/package.py63
-rw-r--r--var/spack/repos/builtin/packages/jdk/package.py46
-rw-r--r--var/spack/repos/builtin/packages/jemalloc/package.py9
-rw-r--r--var/spack/repos/builtin/packages/jpeg/package.py1
-rw-r--r--var/spack/repos/builtin/packages/jsoncpp/package.py (renamed from var/spack/repos/builtin/packages/icu/package.py)32
-rw-r--r--var/spack/repos/builtin/packages/judy/package.py3
-rw-r--r--var/spack/repos/builtin/packages/julia/package.py167
-rw-r--r--var/spack/repos/builtin/packages/kbproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/kdiff3/package.py44
-rw-r--r--var/spack/repos/builtin/packages/kealib/package.py10
-rw-r--r--var/spack/repos/builtin/packages/kripke/package.py1
-rw-r--r--var/spack/repos/builtin/packages/launchmon/package.py26
-rw-r--r--var/spack/repos/builtin/packages/lbxproxy/package.py58
-rw-r--r--var/spack/repos/builtin/packages/lcms/package.py1
-rw-r--r--var/spack/repos/builtin/packages/leveldb/package.py2
-rw-r--r--var/spack/repos/builtin/packages/libaio/package.py43
-rw-r--r--var/spack/repos/builtin/packages/libapplewm/package.py54
-rw-r--r--var/spack/repos/builtin/packages/libarchive/package.py21
-rw-r--r--var/spack/repos/builtin/packages/libatomic-ops/package.py42
-rw-r--r--var/spack/repos/builtin/packages/libcerf/package.py15
-rw-r--r--var/spack/repos/builtin/packages/libcircle/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libctl/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libdmx/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libdrm/package.py14
-rw-r--r--var/spack/repos/builtin/packages/libdwarf/package.py13
-rw-r--r--var/spack/repos/builtin/packages/libedit/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libelf/package.py18
-rw-r--r--var/spack/repos/builtin/packages/libemos/package.py52
-rw-r--r--var/spack/repos/builtin/packages/libepoxy/package.py39
-rw-r--r--var/spack/repos/builtin/packages/libevent/package.py11
-rw-r--r--var/spack/repos/builtin/packages/libffi/package.py16
-rw-r--r--var/spack/repos/builtin/packages/libfontenc/package.py46
-rw-r--r--var/spack/repos/builtin/packages/libfs/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libgcrypt/package.py1
-rw-r--r--var/spack/repos/builtin/packages/libgd/package.py47
-rw-r--r--var/spack/repos/builtin/packages/libgpg-error/package.py1
-rw-r--r--var/spack/repos/builtin/packages/libgtextutils/package.py40
-rw-r--r--var/spack/repos/builtin/packages/libhio/package.py45
-rw-r--r--var/spack/repos/builtin/packages/libice/package.py45
-rw-r--r--var/spack/repos/builtin/packages/libiconv/gets.patch13
-rw-r--r--var/spack/repos/builtin/packages/libiconv/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libint/package.py104
-rw-r--r--var/spack/repos/builtin/packages/libjpeg-turbo/package.py22
-rw-r--r--var/spack/repos/builtin/packages/libjson-c/package.py11
-rw-r--r--var/spack/repos/builtin/packages/liblbxutil/package.py51
-rw-r--r--var/spack/repos/builtin/packages/libmesh/package.py56
-rw-r--r--var/spack/repos/builtin/packages/libmng/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libmonitor/package.py12
-rw-r--r--var/spack/repos/builtin/packages/libnbc/package.py (renamed from var/spack/repos/builtin/packages/libNBC/package.py)1
-rw-r--r--var/spack/repos/builtin/packages/liboldx/package.py45
-rw-r--r--var/spack/repos/builtin/packages/libpciaccess/package.py17
-rw-r--r--var/spack/repos/builtin/packages/libpng/package.py34
-rw-r--r--var/spack/repos/builtin/packages/libpthread-stubs/package.py40
-rw-r--r--var/spack/repos/builtin/packages/libquo/package.py68
-rw-r--r--var/spack/repos/builtin/packages/libsigsegv/package.py14
-rw-r--r--var/spack/repos/builtin/packages/libsigsegv/patch.new_config_guess1187
-rw-r--r--var/spack/repos/builtin/packages/libsm/package.py47
-rw-r--r--var/spack/repos/builtin/packages/libsodium/package.py11
-rw-r--r--var/spack/repos/builtin/packages/libspatialindex/package.py32
-rw-r--r--var/spack/repos/builtin/packages/libsplash/package.py64
-rw-r--r--var/spack/repos/builtin/packages/libtermkey/package.py10
-rw-r--r--var/spack/repos/builtin/packages/libtiff/package.py14
-rw-r--r--var/spack/repos/builtin/packages/libtool/package.py28
-rw-r--r--var/spack/repos/builtin/packages/libunistring/package.py42
-rw-r--r--var/spack/repos/builtin/packages/libunwind/package.py1
-rw-r--r--var/spack/repos/builtin/packages/libuuid/package.py4
-rw-r--r--var/spack/repos/builtin/packages/libuv/package.py7
-rw-r--r--var/spack/repos/builtin/packages/libvterm/package.py1
-rw-r--r--var/spack/repos/builtin/packages/libwebsockets/package.py39
-rw-r--r--var/spack/repos/builtin/packages/libwindowswm/package.py53
-rw-r--r--var/spack/repos/builtin/packages/libx11/package.py51
-rw-r--r--var/spack/repos/builtin/packages/libxau/package.py47
-rw-r--r--var/spack/repos/builtin/packages/libxaw/package.py52
-rw-r--r--var/spack/repos/builtin/packages/libxaw3d/package.py50
-rw-r--r--var/spack/repos/builtin/packages/libxc/package.py23
-rw-r--r--var/spack/repos/builtin/packages/libxcb/package.py30
-rw-r--r--var/spack/repos/builtin/packages/libxcomposite/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libxcursor/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libxdamage/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxdmcp/package.py45
-rw-r--r--var/spack/repos/builtin/packages/libxevie/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxext/package.py47
-rw-r--r--var/spack/repos/builtin/packages/libxfixes/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxfont/package.py54
-rw-r--r--var/spack/repos/builtin/packages/libxfont2/package.py54
-rw-r--r--var/spack/repos/builtin/packages/libxfontcache/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libxft/package.py52
-rw-r--r--var/spack/repos/builtin/packages/libxi/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libxinerama/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libxkbfile/package.py46
-rw-r--r--var/spack/repos/builtin/packages/libxkbui/package.py47
-rw-r--r--var/spack/repos/builtin/packages/libxml2/package.py24
-rw-r--r--var/spack/repos/builtin/packages/libxmu/package.py51
-rw-r--r--var/spack/repos/builtin/packages/libxp/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxpm/package.py50
-rw-r--r--var/spack/repos/builtin/packages/libxpresent/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxprintapputil/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxprintutil/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxrandr/package.py50
-rw-r--r--var/spack/repos/builtin/packages/libxrender/package.py46
-rw-r--r--var/spack/repos/builtin/packages/libxres/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libxscrnsaver/package.py48
-rw-r--r--var/spack/repos/builtin/packages/libxshmfence/package.py19
-rw-r--r--var/spack/repos/builtin/packages/libxslt/package.py9
-rw-r--r--var/spack/repos/builtin/packages/libxsmm/package.py66
-rw-r--r--var/spack/repos/builtin/packages/libxstream/package.py50
-rw-r--r--var/spack/repos/builtin/packages/libxt/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxtrap/package.py58
-rw-r--r--var/spack/repos/builtin/packages/libxtst/package.py59
-rw-r--r--var/spack/repos/builtin/packages/libxv/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxvmc/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxxf86dga/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxxf86misc/package.py49
-rw-r--r--var/spack/repos/builtin/packages/libxxf86vm/package.py49
-rw-r--r--var/spack/repos/builtin/packages/likwid/package.py69
-rw-r--r--var/spack/repos/builtin/packages/listres/package.py49
-rw-r--r--var/spack/repos/builtin/packages/llvm-lld/package.py5
-rw-r--r--var/spack/repos/builtin/packages/llvm/package.py408
-rw-r--r--var/spack/repos/builtin/packages/lmdb/package.py2
-rw-r--r--var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch10
-rw-r--r--var/spack/repos/builtin/packages/lmod/package.py44
-rw-r--r--var/spack/repos/builtin/packages/lndir/package.py44
-rw-r--r--var/spack/repos/builtin/packages/lrslib/Makefile.spack.patch60
-rw-r--r--var/spack/repos/builtin/packages/lrslib/package.py61
-rw-r--r--var/spack/repos/builtin/packages/lrzip/package.py61
-rw-r--r--var/spack/repos/builtin/packages/lua-jit/package.py (renamed from var/spack/repos/builtin/packages/LuaJIT/package.py)3
-rw-r--r--var/spack/repos/builtin/packages/lua-luafilesystem/package.py52
-rw-r--r--var/spack/repos/builtin/packages/lua-luaposix/package.py24
-rw-r--r--var/spack/repos/builtin/packages/lua/package.py22
-rw-r--r--var/spack/repos/builtin/packages/luit/package.py51
-rw-r--r--var/spack/repos/builtin/packages/lulesh/package.py55
-rw-r--r--var/spack/repos/builtin/packages/lulesh/remove_defaults.patch60
-rw-r--r--var/spack/repos/builtin/packages/lwgrp/package.py2
-rw-r--r--var/spack/repos/builtin/packages/lwm2/package.py1
-rw-r--r--var/spack/repos/builtin/packages/lz4/package.py45
-rw-r--r--var/spack/repos/builtin/packages/lzma/package.py41
-rw-r--r--var/spack/repos/builtin/packages/lzo/package.py44
-rw-r--r--var/spack/repos/builtin/packages/m4/package.py30
-rw-r--r--var/spack/repos/builtin/packages/mafft/package.py42
-rw-r--r--var/spack/repos/builtin/packages/magics/no_hardcoded_python.patch5
-rw-r--r--var/spack/repos/builtin/packages/magics/package.py115
-rw-r--r--var/spack/repos/builtin/packages/magics/resolve_isnan_ambiguity.patch73
-rw-r--r--var/spack/repos/builtin/packages/makedepend/package.py44
-rw-r--r--var/spack/repos/builtin/packages/mariadb/package.py59
-rw-r--r--var/spack/repos/builtin/packages/matio/package.py23
-rw-r--r--var/spack/repos/builtin/packages/maven/package.py41
-rw-r--r--var/spack/repos/builtin/packages/mbedtls/package.py19
-rw-r--r--var/spack/repos/builtin/packages/meep/package.py109
-rw-r--r--var/spack/repos/builtin/packages/memaxes/package.py3
-rw-r--r--var/spack/repos/builtin/packages/mercurial/package.py69
-rw-r--r--var/spack/repos/builtin/packages/mesa/package.py45
-rw-r--r--var/spack/repos/builtin/packages/metis/package.py245
-rw-r--r--var/spack/repos/builtin/packages/mfem/package.py131
-rw-r--r--var/spack/repos/builtin/packages/mitos/package.py (renamed from var/spack/repos/builtin/packages/Mitos/package.py)1
-rw-r--r--var/spack/repos/builtin/packages/mkfontdir/package.py47
-rw-r--r--var/spack/repos/builtin/packages/mkfontscale/package.py48
-rw-r--r--var/spack/repos/builtin/packages/moab/package.py10
-rw-r--r--var/spack/repos/builtin/packages/mpc/package.py7
-rw-r--r--var/spack/repos/builtin/packages/mpe2/package.py3
-rw-r--r--var/spack/repos/builtin/packages/mpfr/package.py3
-rw-r--r--var/spack/repos/builtin/packages/mpibash/package.py4
-rw-r--r--var/spack/repos/builtin/packages/mpich/package.py147
-rw-r--r--var/spack/repos/builtin/packages/mpileaks/package.py4
-rw-r--r--var/spack/repos/builtin/packages/mpip/package.py44
-rw-r--r--var/spack/repos/builtin/packages/mpir/package.py62
-rw-r--r--var/spack/repos/builtin/packages/mrnet/package.py21
-rw-r--r--var/spack/repos/builtin/packages/msgpack-c/package.py3
-rw-r--r--var/spack/repos/builtin/packages/mumps/package.py118
-rw-r--r--var/spack/repos/builtin/packages/munge/package.py12
-rw-r--r--var/spack/repos/builtin/packages/muparser/package.py1
-rw-r--r--var/spack/repos/builtin/packages/muster/package.py2
-rw-r--r--var/spack/repos/builtin/packages/mvapich2/package.py169
-rw-r--r--var/spack/repos/builtin/packages/mxml/package.py50
-rw-r--r--var/spack/repos/builtin/packages/nag/package.py6
-rw-r--r--var/spack/repos/builtin/packages/nano/package.py40
-rw-r--r--var/spack/repos/builtin/packages/nasm/package.py1
-rw-r--r--var/spack/repos/builtin/packages/nauty/package.py89
-rw-r--r--var/spack/repos/builtin/packages/nccmp/package.py1
-rw-r--r--var/spack/repos/builtin/packages/ncdu/package.py12
-rw-r--r--var/spack/repos/builtin/packages/ncl/package.py233
-rw-r--r--var/spack/repos/builtin/packages/ncl/spack_ncl.patch30
-rw-r--r--var/spack/repos/builtin/packages/nco/package.py39
-rw-r--r--var/spack/repos/builtin/packages/ncurses/package.py16
-rw-r--r--var/spack/repos/builtin/packages/ncview/package.py16
-rw-r--r--var/spack/repos/builtin/packages/ndiff/package.py12
-rw-r--r--var/spack/repos/builtin/packages/netcdf-cxx/package.py8
-rw-r--r--var/spack/repos/builtin/packages/netcdf-cxx4/package.py9
-rw-r--r--var/spack/repos/builtin/packages/netcdf-fortran/package.py9
-rw-r--r--var/spack/repos/builtin/packages/netcdf/package.py136
-rw-r--r--var/spack/repos/builtin/packages/netgauge/package.py1
-rw-r--r--var/spack/repos/builtin/packages/netlib-lapack/package.py81
-rw-r--r--var/spack/repos/builtin/packages/netlib-scalapack/package.py60
-rw-r--r--var/spack/repos/builtin/packages/nettle/package.py12
-rw-r--r--var/spack/repos/builtin/packages/nextflow/package.py42
-rw-r--r--var/spack/repos/builtin/packages/nfft/package.py60
-rw-r--r--var/spack/repos/builtin/packages/ninja/package.py2
-rw-r--r--var/spack/repos/builtin/packages/nmap/package.py37
-rw-r--r--var/spack/repos/builtin/packages/node-js/package.py110
-rw-r--r--var/spack/repos/builtin/packages/npm/package.py54
-rw-r--r--var/spack/repos/builtin/packages/numdiff/package.py11
-rw-r--r--var/spack/repos/builtin/packages/nwchem/package.py170
-rw-r--r--var/spack/repos/builtin/packages/ocaml/package.py43
-rw-r--r--var/spack/repos/builtin/packages/oce/package.py41
-rw-r--r--var/spack/repos/builtin/packages/oce/sierra.patch45
-rw-r--r--var/spack/repos/builtin/packages/oclock/package.py50
-rw-r--r--var/spack/repos/builtin/packages/octave-splines/package.py44
-rw-r--r--var/spack/repos/builtin/packages/octave/package.py74
-rw-r--r--var/spack/repos/builtin/packages/octopus/package.py134
-rw-r--r--var/spack/repos/builtin/packages/ompss/package.py37
-rw-r--r--var/spack/repos/builtin/packages/ompt-openmp/package.py16
-rw-r--r--var/spack/repos/builtin/packages/opari2/package.py18
-rw-r--r--var/spack/repos/builtin/packages/openblas/openblas_icc.patch253
-rw-r--r--var/spack/repos/builtin/packages/openblas/package.py153
-rw-r--r--var/spack/repos/builtin/packages/opencoarrays/package.py49
-rw-r--r--var/spack/repos/builtin/packages/opencv/package.py176
-rw-r--r--var/spack/repos/builtin/packages/openexr/package.py54
-rw-r--r--var/spack/repos/builtin/packages/openjpeg/package.py23
-rw-r--r--var/spack/repos/builtin/packages/openmpi/package.py282
-rw-r--r--var/spack/repos/builtin/packages/openscenegraph/package.py77
-rw-r--r--var/spack/repos/builtin/packages/openspeedshop/package.py538
-rw-r--r--var/spack/repos/builtin/packages/openssl/package.py88
-rw-r--r--var/spack/repos/builtin/packages/opium/package.py51
-rw-r--r--var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py2
-rw-r--r--var/spack/repos/builtin/packages/otf/package.py1
-rw-r--r--var/spack/repos/builtin/packages/otf2/package.py12
-rw-r--r--var/spack/repos/builtin/packages/p4est/package.py45
-rw-r--r--var/spack/repos/builtin/packages/panda/package.py45
-rw-r--r--var/spack/repos/builtin/packages/pango/package.py15
-rw-r--r--var/spack/repos/builtin/packages/papi/package.py4
-rw-r--r--var/spack/repos/builtin/packages/paradiseo/package.py71
-rw-r--r--var/spack/repos/builtin/packages/parallel-netcdf/package.py27
-rw-r--r--var/spack/repos/builtin/packages/parallel/package.py44
-rw-r--r--var/spack/repos/builtin/packages/paraver/package.py25
-rw-r--r--var/spack/repos/builtin/packages/paraview/package.py73
-rw-r--r--var/spack/repos/builtin/packages/parmetis/package.py56
-rw-r--r--var/spack/repos/builtin/packages/parmgridgen/package.py71
-rw-r--r--var/spack/repos/builtin/packages/parpack/package.py5
-rw-r--r--var/spack/repos/builtin/packages/patch/package.py37
-rw-r--r--var/spack/repos/builtin/packages/patchelf/package.py8
-rw-r--r--var/spack/repos/builtin/packages/pcre/intel.patch12
-rw-r--r--var/spack/repos/builtin/packages/pcre/package.py16
-rw-r--r--var/spack/repos/builtin/packages/pcre2/package.py1
-rw-r--r--var/spack/repos/builtin/packages/pdt/package.py25
-rw-r--r--var/spack/repos/builtin/packages/perl/package.py75
-rw-r--r--var/spack/repos/builtin/packages/petsc/package.py180
-rw-r--r--var/spack/repos/builtin/packages/pexsi/make.inc79
-rw-r--r--var/spack/repos/builtin/packages/pexsi/package.py103
-rw-r--r--var/spack/repos/builtin/packages/pfft/package.py64
-rw-r--r--var/spack/repos/builtin/packages/pflotran/package.py45
-rw-r--r--var/spack/repos/builtin/packages/pgi/package.py9
-rw-r--r--var/spack/repos/builtin/packages/pidx/package.py2
-rw-r--r--var/spack/repos/builtin/packages/piranha/package.py73
-rw-r--r--var/spack/repos/builtin/packages/pixman/package.py24
-rw-r--r--var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch34
-rw-r--r--var/spack/repos/builtin/packages/pkg-config/package.py30
-rw-r--r--var/spack/repos/builtin/packages/plumed/package.py158
-rw-r--r--var/spack/repos/builtin/packages/pmgr-collective/package.py (renamed from var/spack/repos/builtin/packages/pmgr_collective/package.py)1
-rw-r--r--var/spack/repos/builtin/packages/pngwriter/package.py58
-rw-r--r--var/spack/repos/builtin/packages/polymake/package.py57
-rw-r--r--var/spack/repos/builtin/packages/porta/Makefile.spack.patch23
-rw-r--r--var/spack/repos/builtin/packages/porta/package.py44
-rw-r--r--var/spack/repos/builtin/packages/postgresql/package.py15
-rw-r--r--var/spack/repos/builtin/packages/ppl/package.py1
-rw-r--r--var/spack/repos/builtin/packages/prank/package.py44
-rw-r--r--var/spack/repos/builtin/packages/presentproto/package.py42
-rw-r--r--var/spack/repos/builtin/packages/printproto/package.py43
-rw-r--r--var/spack/repos/builtin/packages/proj/package.py1
-rw-r--r--var/spack/repos/builtin/packages/protobuf/package.py2
-rw-r--r--var/spack/repos/builtin/packages/proxymngr/package.py52
-rw-r--r--var/spack/repos/builtin/packages/psi4/package.py124
-rw-r--r--var/spack/repos/builtin/packages/py-3to2/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-alabaster/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-argcomplete/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-astroid/package.py16
-rw-r--r--var/spack/repos/builtin/packages/py-astropy/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-autopep8/package.py50
-rw-r--r--var/spack/repos/builtin/packages/py-babel/package.py40
-rw-r--r--var/spack/repos/builtin/packages/py-backports-abc/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-backports-shutil-get-terminal-size/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-backports-ssl-match-hostname/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-basemap/package.py99
-rw-r--r--var/spack/repos/builtin/packages/py-beautifulsoup4/package.py41
-rw-r--r--var/spack/repos/builtin/packages/py-biopython/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-bleach/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-blessings/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-bottleneck/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-cclib/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-cdo/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-certifi/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-cffi/package.py18
-rw-r--r--var/spack/repos/builtin/packages/py-configparser/package.py44
-rw-r--r--var/spack/repos/builtin/packages/py-coverage/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-csvkit/package.py20
-rw-r--r--var/spack/repos/builtin/packages/py-cycler/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-cython/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-dask/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-dateutil/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-dbf/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-decorator/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-docutils/package.py40
-rw-r--r--var/spack/repos/builtin/packages/py-emcee/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-entrypoints/package.py40
-rw-r--r--var/spack/repos/builtin/packages/py-enum34/package.py43
-rw-r--r--var/spack/repos/builtin/packages/py-epydoc/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-flake8/package.py44
-rw-r--r--var/spack/repos/builtin/packages/py-funcsigs/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-functools32/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-futures/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-genders/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-genshi/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-gnuplot/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-h5py/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-html5lib/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-imagesize/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-iminuit/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-ipdb/package.py58
-rw-r--r--var/spack/repos/builtin/packages/py-ipykernel/package.py51
-rw-r--r--var/spack/repos/builtin/packages/py-ipython-genutils/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-ipython/package.py27
-rw-r--r--var/spack/repos/builtin/packages/py-ipywidgets/package.py40
-rw-r--r--var/spack/repos/builtin/packages/py-jdcal/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-jinja2/package.py33
-rw-r--r--var/spack/repos/builtin/packages/py-jsonschema/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-jupyter-client/package.py47
-rw-r--r--var/spack/repos/builtin/packages/py-jupyter-console/package.py46
-rw-r--r--var/spack/repos/builtin/packages/py-jupyter-core/package.py47
-rw-r--r--var/spack/repos/builtin/packages/py-jupyter-notebook/package.py61
-rw-r--r--var/spack/repos/builtin/packages/py-lockfile/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-logilab-common/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-macs2/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-mako/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-markdown/package.py56
-rw-r--r--var/spack/repos/builtin/packages/py-markupsafe/package.py34
-rw-r--r--var/spack/repos/builtin/packages/py-matplotlib/package.py121
-rw-r--r--var/spack/repos/builtin/packages/py-mccabe/package.py49
-rw-r--r--var/spack/repos/builtin/packages/py-meep/package.py89
-rw-r--r--var/spack/repos/builtin/packages/py-mistune/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-mock/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-monotonic/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-mpi4py/package.py16
-rw-r--r--var/spack/repos/builtin/packages/py-mpmath/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-mx/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-mysqldb1/package.py13
-rw-r--r--var/spack/repos/builtin/packages/py-nbconvert/package.py55
-rw-r--r--var/spack/repos/builtin/packages/py-nbformat/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-nestle/package.py40
-rw-r--r--var/spack/repos/builtin/packages/py-netcdf/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-networkx/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-nose/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-numexpr/package.py16
-rw-r--r--var/spack/repos/builtin/packages/py-numpy/package.py52
-rw-r--r--var/spack/repos/builtin/packages/py-openpyxl/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-ordereddict/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-pandas/package.py30
-rw-r--r--var/spack/repos/builtin/packages/py-pathlib2/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-pathspec/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-pbr/package.py16
-rw-r--r--var/spack/repos/builtin/packages/py-periodictable/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-pexpect/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-phonopy/package.py15
-rw-r--r--var/spack/repos/builtin/packages/py-pickleshare/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-pil/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-pillow/package.py131
-rw-r--r--var/spack/repos/builtin/packages/py-pip/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-ply/package.py33
-rw-r--r--var/spack/repos/builtin/packages/py-pmw/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-prettytable/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-proj/package.py46
-rw-r--r--var/spack/repos/builtin/packages/py-prompt-toolkit/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-protobuf/package.py45
-rw-r--r--var/spack/repos/builtin/packages/py-psutil/package.py43
-rw-r--r--var/spack/repos/builtin/packages/py-ptyprocess/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-pudb/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-py/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-py2cairo/package.py44
-rw-r--r--var/spack/repos/builtin/packages/py-py2neo/package.py19
-rw-r--r--var/spack/repos/builtin/packages/py-pychecker/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-pycodestyle/package.py47
-rw-r--r--var/spack/repos/builtin/packages/py-pycparser/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-pycurl/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-pydatalog/package.py33
-rw-r--r--var/spack/repos/builtin/packages/py-pyelftools/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-pyflakes/package.py47
-rw-r--r--var/spack/repos/builtin/packages/py-pygments/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-pygobject/package.py48
-rw-r--r--var/spack/repos/builtin/packages/py-pygobject/pygobject-2.28.6-introspection-1.patch35
-rw-r--r--var/spack/repos/builtin/packages/py-pygtk/package.py44
-rw-r--r--var/spack/repos/builtin/packages/py-pylint/package.py16
-rw-r--r--var/spack/repos/builtin/packages/py-pypar/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-pyparsing/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-pyqt/package.py5
-rw-r--r--var/spack/repos/builtin/packages/py-pyside/package.py46
-rw-r--r--var/spack/repos/builtin/packages/py-pytables/package.py25
-rw-r--r--var/spack/repos/builtin/packages/py-pytest/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-python-daemon/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-pytz/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-pyyaml/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-readme-renderer/package.py42
-rw-r--r--var/spack/repos/builtin/packages/py-restview/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-rpy2/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-rtree/package.py56
-rw-r--r--var/spack/repos/builtin/packages/py-scientificpython/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-image/package.py23
-rw-r--r--var/spack/repos/builtin/packages/py-scikit-learn/package.py16
-rw-r--r--var/spack/repos/builtin/packages/py-scipy/package.py34
-rw-r--r--var/spack/repos/builtin/packages/py-setuptools/package.py14
-rw-r--r--var/spack/repos/builtin/packages/py-shiboken/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-simplegeneric/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-singledispatch/package.py41
-rw-r--r--var/spack/repos/builtin/packages/py-sip/package.py4
-rw-r--r--var/spack/repos/builtin/packages/py-six/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-sncosmo/package.py48
-rw-r--r--var/spack/repos/builtin/packages/py-snowballstemmer/package.py35
-rw-r--r--var/spack/repos/builtin/packages/py-sphinx-bootstrap-theme/package.py41
-rw-r--r--var/spack/repos/builtin/packages/py-sphinx-rtd-theme/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-sphinx/package.py22
-rw-r--r--var/spack/repos/builtin/packages/py-sqlalchemy/package.py (renamed from var/spack/repos/builtin/packages/py-SQLAlchemy/package.py)8
-rw-r--r--var/spack/repos/builtin/packages/py-storm/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-symengine/package.py45
-rw-r--r--var/spack/repos/builtin/packages/py-sympy/package.py7
-rw-r--r--var/spack/repos/builtin/packages/py-tappy/package.py12
-rw-r--r--var/spack/repos/builtin/packages/py-terminado/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-tornado/package.py48
-rw-r--r--var/spack/repos/builtin/packages/py-traitlets/package.py50
-rw-r--r--var/spack/repos/builtin/packages/py-tuiview/package.py17
-rw-r--r--var/spack/repos/builtin/packages/py-twisted/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-unittest2/package.py37
-rw-r--r--var/spack/repos/builtin/packages/py-unittest2py3k/package.py39
-rw-r--r--var/spack/repos/builtin/packages/py-urwid/package.py11
-rw-r--r--var/spack/repos/builtin/packages/py-vcversioner/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-virtualenv/package.py10
-rw-r--r--var/spack/repos/builtin/packages/py-wcsaxes/package.py40
-rw-r--r--var/spack/repos/builtin/packages/py-wcwidth/package.py36
-rw-r--r--var/spack/repos/builtin/packages/py-wheel/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-widgetsnbextension/package.py38
-rw-r--r--var/spack/repos/builtin/packages/py-xlrd/package.py8
-rw-r--r--var/spack/repos/builtin/packages/py-xpyb/package.py47
-rw-r--r--var/spack/repos/builtin/packages/py-yapf/package.py9
-rw-r--r--var/spack/repos/builtin/packages/py-yt/package.py73
-rw-r--r--var/spack/repos/builtin/packages/py-zmq/package.py40
-rw-r--r--var/spack/repos/builtin/packages/python/ncurses.patch11
-rw-r--r--var/spack/repos/builtin/packages/python/package.py436
-rw-r--r--var/spack/repos/builtin/packages/qhull/package.py18
-rw-r--r--var/spack/repos/builtin/packages/qhull/qhull-iterator.patch45
-rw-r--r--var/spack/repos/builtin/packages/qrupdate/package.py1
-rw-r--r--var/spack/repos/builtin/packages/qt-creator/package.py46
-rw-r--r--var/spack/repos/builtin/packages/qt/btn_trigger_happy.patch17
-rw-r--r--var/spack/repos/builtin/packages/qt/package.py214
-rw-r--r--var/spack/repos/builtin/packages/qt/qt4-corewlan-new-osx.patch16
-rw-r--r--var/spack/repos/builtin/packages/qt/qt4-el-capitan.patch31
-rw-r--r--var/spack/repos/builtin/packages/qt/qt4-pcre-include-conflict.patch16
-rw-r--r--var/spack/repos/builtin/packages/qthreads/ldflags.patch11
-rw-r--r--var/spack/repos/builtin/packages/qthreads/package.py17
-rw-r--r--var/spack/repos/builtin/packages/r-abind/package.py12
-rw-r--r--var/spack/repos/builtin/packages/r-assertthat/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-base64enc/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-bh/package.py48
-rw-r--r--var/spack/repos/builtin/packages/r-biocgenerics/package.py (renamed from var/spack/repos/builtin/packages/r-BiocGenerics/package.py)20
-rw-r--r--var/spack/repos/builtin/packages/r-bitops/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-boot/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-brew/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-c50/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-car/package.py42
-rw-r--r--var/spack/repos/builtin/packages/r-caret/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-catools/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-chron/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-class/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-cluster/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-codetools/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-colorspace/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-crayon/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-cubature/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-curl/package.py46
-rw-r--r--var/spack/repos/builtin/packages/r-datatable/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-dbi/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-deoptim/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-devtools/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-diagrammer/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-dichromat/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-digest/package.py50
-rw-r--r--var/spack/repos/builtin/packages/r-doparallel/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-dplyr/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-dt/package.py42
-rw-r--r--var/spack/repos/builtin/packages/r-dygraphs/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-e1071/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-evaluate/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-filehash/package.py11
-rw-r--r--var/spack/repos/builtin/packages/r-foreach/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-foreign/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-formatr/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-formula/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-gdata/package.py53
-rw-r--r--var/spack/repos/builtin/packages/r-geosphere/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-ggmap/package.py51
-rw-r--r--var/spack/repos/builtin/packages/r-ggplot2/package.py48
-rw-r--r--var/spack/repos/builtin/packages/r-ggvis/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-gistr/package.py49
-rw-r--r--var/spack/repos/builtin/packages/r-git2r/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-glmnet/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-googlevis/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-gridbase/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-gridextra/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-gtable/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-gtools/package.py54
-rw-r--r--var/spack/repos/builtin/packages/r-hexbin/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-highr/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-htmltools/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-htmlwidgets/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-httpuv/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-httr/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-igraph/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-influencer/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-inline/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-irdisplay/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-irkernel/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-irlba/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-iterators/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-jpeg/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-jsonlite/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-knitr/package.py46
-rw-r--r--var/spack/repos/builtin/packages/r-labeling/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-lattice/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-lazyeval/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-leaflet/package.py48
-rw-r--r--var/spack/repos/builtin/packages/r-lme4/package.py47
-rw-r--r--var/spack/repos/builtin/packages/r-lmtest/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-lubridate/package.py42
-rw-r--r--var/spack/repos/builtin/packages/r-magic/package.py13
-rw-r--r--var/spack/repos/builtin/packages/r-magrittr/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-mapproj/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-maps/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-maptools/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-markdown/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-mass/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-matrix/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-matrixmodels/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-memoise/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-mgcv/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-mime/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-minqa/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-multcomp/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-munsell/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-mvtnorm/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-ncdf4/package.py50
-rw-r--r--var/spack/repos/builtin/packages/r-networkd3/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-nlme/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-nloptr/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-nmf/package.py54
-rw-r--r--var/spack/repos/builtin/packages/r-nnet/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-np/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-openssl/package.py46
-rw-r--r--var/spack/repos/builtin/packages/r-packrat/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-partykit/package.py46
-rw-r--r--var/spack/repos/builtin/packages/r-pbdzmq/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-pbkrtest/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-pkgmaker/package.py47
-rw-r--r--var/spack/repos/builtin/packages/r-plotrix/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-plyr/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-png/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-praise/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-proto/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-pryr/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-quantmod/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-quantreg/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-r6/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-randomforest/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-raster/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-rbokeh/package.py49
-rw-r--r--var/spack/repos/builtin/packages/r-rcolorbrewer/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-rcpp/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-rcppeigen/package.py50
-rw-r--r--var/spack/repos/builtin/packages/r-registry/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-repr/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-reshape2/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-rgooglemaps/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-rinside/package.py51
-rw-r--r--var/spack/repos/builtin/packages/r-rjava/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-rjson/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-rjsonio/package.py49
-rw-r--r--var/spack/repos/builtin/packages/r-rmarkdown/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-rmysql/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-rngtools/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-rodbc/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-roxygen2/package.py42
-rw-r--r--var/spack/repos/builtin/packages/r-rpostgresql/package.py46
-rw-r--r--var/spack/repos/builtin/packages/r-rsnns/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-rsqlite/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-rstan/package.py51
-rw-r--r--var/spack/repos/builtin/packages/r-rstudioapi/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-rzmq/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-sandwich/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-scales/package.py43
-rw-r--r--var/spack/repos/builtin/packages/r-shiny/package.py46
-rw-r--r--var/spack/repos/builtin/packages/r-sp/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-sparsem/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-stanheaders/package.py49
-rw-r--r--var/spack/repos/builtin/packages/r-stringi/package.py45
-rw-r--r--var/spack/repos/builtin/packages/r-stringr/package.py42
-rw-r--r--var/spack/repos/builtin/packages/r-survey/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-survival/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-tarifx/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-testit/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-testthat/package.py42
-rw-r--r--var/spack/repos/builtin/packages/r-thdata/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-threejs/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-tibble/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-tidyr/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-ttr/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-uuid/package.py37
-rw-r--r--var/spack/repos/builtin/packages/r-vcd/package.py44
-rw-r--r--var/spack/repos/builtin/packages/r-visnetwork/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r-whisker/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-withr/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-xgboost/package.py48
-rw-r--r--var/spack/repos/builtin/packages/r-xlconnect/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-xlconnectjars/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-xlsx/package.py39
-rw-r--r--var/spack/repos/builtin/packages/r-xlsxjars/package.py38
-rw-r--r--var/spack/repos/builtin/packages/r-xml/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-xtable/package.py35
-rw-r--r--var/spack/repos/builtin/packages/r-xts/package.py40
-rw-r--r--var/spack/repos/builtin/packages/r-yaml/package.py36
-rw-r--r--var/spack/repos/builtin/packages/r-zoo/package.py41
-rw-r--r--var/spack/repos/builtin/packages/r/package.py185
-rw-r--r--var/spack/repos/builtin/packages/raja/package.py4
-rw-r--r--var/spack/repos/builtin/packages/randrproto/package.py46
-rw-r--r--var/spack/repos/builtin/packages/ravel/package.py4
-rw-r--r--var/spack/repos/builtin/packages/readline/package.py1
-rw-r--r--var/spack/repos/builtin/packages/recordproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/rename/package.py45
-rw-r--r--var/spack/repos/builtin/packages/rendercheck/package.py48
-rw-r--r--var/spack/repos/builtin/packages/renderproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/resourceproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/rgb/package.py51
-rw-r--r--var/spack/repos/builtin/packages/root/math_uint.patch115
-rw-r--r--var/spack/repos/builtin/packages/root/package.py89
-rw-r--r--var/spack/repos/builtin/packages/root/root6-60606-mathmore.patch29
-rw-r--r--var/spack/repos/builtin/packages/rose/package.py15
-rw-r--r--var/spack/repos/builtin/packages/rstart/package.py49
-rw-r--r--var/spack/repos/builtin/packages/rsync/package.py3
-rw-r--r--var/spack/repos/builtin/packages/ruby/package.py13
-rw-r--r--var/spack/repos/builtin/packages/rust-bindgen/package.py42
-rw-r--r--var/spack/repos/builtin/packages/rust/package.py88
-rw-r--r--var/spack/repos/builtin/packages/samrai/no-tool-build.patch (renamed from var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch)0
-rw-r--r--var/spack/repos/builtin/packages/samrai/package.py (renamed from var/spack/repos/builtin/packages/SAMRAI/package.py)10
-rw-r--r--var/spack/repos/builtin/packages/samtools/package.py27
-rw-r--r--var/spack/repos/builtin/packages/samtools/samtools1.2.patch20
-rw-r--r--var/spack/repos/builtin/packages/sbt/package.py41
-rw-r--r--var/spack/repos/builtin/packages/scalasca/package.py15
-rw-r--r--var/spack/repos/builtin/packages/scons/package.py8
-rw-r--r--var/spack/repos/builtin/packages/scorep/package.py37
-rw-r--r--var/spack/repos/builtin/packages/scotch/Makefile.esmumps5
-rw-r--r--var/spack/repos/builtin/packages/scotch/package.py160
-rw-r--r--var/spack/repos/builtin/packages/scr/package.py7
-rw-r--r--var/spack/repos/builtin/packages/screen/package.py58
-rw-r--r--var/spack/repos/builtin/packages/scripts/package.py45
-rw-r--r--var/spack/repos/builtin/packages/scrnsaverproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/sdl2-image/package.py43
-rw-r--r--var/spack/repos/builtin/packages/sdl2/package.py45
-rw-r--r--var/spack/repos/builtin/packages/seqtk/package.py43
-rw-r--r--var/spack/repos/builtin/packages/serf/package.py18
-rw-r--r--var/spack/repos/builtin/packages/sessreg/package.py51
-rw-r--r--var/spack/repos/builtin/packages/setxkbmap/package.py48
-rw-r--r--var/spack/repos/builtin/packages/showfont/package.py47
-rw-r--r--var/spack/repos/builtin/packages/silo/package.py12
-rw-r--r--var/spack/repos/builtin/packages/slepc/install_name_371.patch32
-rw-r--r--var/spack/repos/builtin/packages/slepc/package.py29
-rw-r--r--var/spack/repos/builtin/packages/smproxy/package.py49
-rw-r--r--var/spack/repos/builtin/packages/snappy/package.py2
-rw-r--r--var/spack/repos/builtin/packages/sowing/package.py42
-rw-r--r--var/spack/repos/builtin/packages/spark/package.py74
-rw-r--r--var/spack/repos/builtin/packages/sparsehash/package.py1
-rw-r--r--var/spack/repos/builtin/packages/spdlog/package.py36
-rw-r--r--var/spack/repos/builtin/packages/spindle/package.py1
-rw-r--r--var/spack/repos/builtin/packages/spot/package.py7
-rw-r--r--var/spack/repos/builtin/packages/sqlite/package.py14
-rw-r--r--var/spack/repos/builtin/packages/star-ccm-plus/package.py78
-rw-r--r--var/spack/repos/builtin/packages/stat/package.py27
-rw-r--r--var/spack/repos/builtin/packages/stream/package.py62
-rw-r--r--var/spack/repos/builtin/packages/sublime-text/package.py59
-rw-r--r--var/spack/repos/builtin/packages/subversion/package.py30
-rw-r--r--var/spack/repos/builtin/packages/suite-sparse/package.py69
-rw-r--r--var/spack/repos/builtin/packages/suite-sparse/tbb_453.patch13
-rw-r--r--var/spack/repos/builtin/packages/sundials/package.py153
-rw-r--r--var/spack/repos/builtin/packages/superlu-dist/package.py58
-rw-r--r--var/spack/repos/builtin/packages/superlu-mt/package.py136
-rw-r--r--var/spack/repos/builtin/packages/superlu/package.py120
-rw-r--r--var/spack/repos/builtin/packages/swiftsim/package.py72
-rw-r--r--var/spack/repos/builtin/packages/swig/package.py3
-rw-r--r--var/spack/repos/builtin/packages/symengine/package.py113
-rw-r--r--var/spack/repos/builtin/packages/sympol/lrs_mp_close.patch10
-rw-r--r--var/spack/repos/builtin/packages/sympol/package.py48
-rw-r--r--var/spack/repos/builtin/packages/szip/package.py28
-rw-r--r--var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch482
-rw-r--r--var/spack/repos/builtin/packages/tar/package.py17
-rw-r--r--var/spack/repos/builtin/packages/task/package.py2
-rw-r--r--var/spack/repos/builtin/packages/taskd/package.py4
-rw-r--r--var/spack/repos/builtin/packages/tau/package.py57
-rw-r--r--var/spack/repos/builtin/packages/tbb/package.py56
-rw-r--r--var/spack/repos/builtin/packages/tcl/package.py28
-rw-r--r--var/spack/repos/builtin/packages/tetgen/package.py44
-rw-r--r--var/spack/repos/builtin/packages/tetgen/tetgen-1.5.0-free.patch49
-rw-r--r--var/spack/repos/builtin/packages/tethex/package.py49
-rw-r--r--var/spack/repos/builtin/packages/texinfo/package.py19
-rw-r--r--var/spack/repos/builtin/packages/texlive/package.py67
-rw-r--r--var/spack/repos/builtin/packages/the-platinum-searcher/package.py45
-rw-r--r--var/spack/repos/builtin/packages/the-silver-searcher/package.py (renamed from var/spack/repos/builtin/packages/the_silver_searcher/package.py)6
-rw-r--r--var/spack/repos/builtin/packages/the_platinum_searcher/package.py21
-rw-r--r--var/spack/repos/builtin/packages/thrift/package.py33
-rw-r--r--var/spack/repos/builtin/packages/tinyxml/CMakeLists.txt17
-rw-r--r--var/spack/repos/builtin/packages/tinyxml/package.py39
-rw-r--r--var/spack/repos/builtin/packages/tinyxml2/package.py39
-rw-r--r--var/spack/repos/builtin/packages/tk/package.py32
-rw-r--r--var/spack/repos/builtin/packages/tmux/package.py20
-rw-r--r--var/spack/repos/builtin/packages/tmuxinator/package.py9
-rw-r--r--var/spack/repos/builtin/packages/transset/package.py46
-rw-r--r--var/spack/repos/builtin/packages/trapproto/package.py39
-rw-r--r--var/spack/repos/builtin/packages/tree/package.py63
-rw-r--r--var/spack/repos/builtin/packages/triangle/package.py1
-rw-r--r--var/spack/repos/builtin/packages/trilinos/package.py348
-rw-r--r--var/spack/repos/builtin/packages/turbomole/package.py16
-rw-r--r--var/spack/repos/builtin/packages/twm/package.py56
-rw-r--r--var/spack/repos/builtin/packages/uberftp/package.py44
-rw-r--r--var/spack/repos/builtin/packages/udunits2/package.py13
-rw-r--r--var/spack/repos/builtin/packages/uncrustify/package.py3
-rw-r--r--var/spack/repos/builtin/packages/unibilium/package.py5
-rw-r--r--var/spack/repos/builtin/packages/unison/package.py51
-rw-r--r--var/spack/repos/builtin/packages/unixodbc/package.py42
-rw-r--r--var/spack/repos/builtin/packages/util-linux/package.py8
-rw-r--r--var/spack/repos/builtin/packages/util-macros/package.py41
-rw-r--r--var/spack/repos/builtin/packages/uuid/package.py37
-rw-r--r--var/spack/repos/builtin/packages/valgrind/package.py16
-rw-r--r--var/spack/repos/builtin/packages/veclibfort/package.py70
-rw-r--r--var/spack/repos/builtin/packages/videoproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/viewres/package.py48
-rw-r--r--var/spack/repos/builtin/packages/vim/package.py105
-rw-r--r--var/spack/repos/builtin/packages/visit/package.py44
-rw-r--r--var/spack/repos/builtin/packages/vizglow/package.py58
-rw-r--r--var/spack/repos/builtin/packages/vtk/gcc.patch21
-rw-r--r--var/spack/repos/builtin/packages/vtk/package.py85
-rw-r--r--var/spack/repos/builtin/packages/wannier90/make.sys7
-rw-r--r--var/spack/repos/builtin/packages/wannier90/package.py116
-rw-r--r--var/spack/repos/builtin/packages/wget/package.py11
-rw-r--r--var/spack/repos/builtin/packages/windowswmproto/package.py44
-rw-r--r--var/spack/repos/builtin/packages/wx/package.py22
-rw-r--r--var/spack/repos/builtin/packages/wxpropgrid/package.py5
-rw-r--r--var/spack/repos/builtin/packages/x11perf/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xauth/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xbacklight/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xbiff/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xbitmaps/package.py43
-rw-r--r--var/spack/repos/builtin/packages/xcalc/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xcb-demo/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xcb-proto/package.py13
-rw-r--r--var/spack/repos/builtin/packages/xcb-util-cursor/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xcb-util-errors/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xcb-util-image/package.py52
-rw-r--r--var/spack/repos/builtin/packages/xcb-util-keysyms/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xcb-util-renderutil/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xcb-util-wm/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xcb-util/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xclipboard/package.py53
-rw-r--r--var/spack/repos/builtin/packages/xclock/package.py54
-rw-r--r--var/spack/repos/builtin/packages/xcmiscproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/xcmsdb/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xcompmgr/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xconsole/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xcursor-themes/package.py53
-rw-r--r--var/spack/repos/builtin/packages/xcursorgen/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xdbedizzy/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xditview/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xdm/package.py54
-rw-r--r--var/spack/repos/builtin/packages/xdpyinfo/package.py54
-rw-r--r--var/spack/repos/builtin/packages/xdriinfo/package.py52
-rw-r--r--var/spack/repos/builtin/packages/xedit/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xerces-c/package.py15
-rw-r--r--var/spack/repos/builtin/packages/xev/package.py53
-rw-r--r--var/spack/repos/builtin/packages/xextproto/package.py44
-rw-r--r--var/spack/repos/builtin/packages/xeyes/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xf86bigfontproto/package.py39
-rw-r--r--var/spack/repos/builtin/packages/xf86dga/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xf86dgaproto/package.py39
-rw-r--r--var/spack/repos/builtin/packages/xf86driproto/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xf86miscproto/package.py42
-rw-r--r--var/spack/repos/builtin/packages/xf86rushproto/package.py39
-rw-r--r--var/spack/repos/builtin/packages/xf86vidmodeproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/xfd/package.py52
-rw-r--r--var/spack/repos/builtin/packages/xfindproxy/package.py53
-rw-r--r--var/spack/repos/builtin/packages/xfontsel/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xfs/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xfsinfo/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xfwp/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xgamma/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xgc/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xhost/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xineramaproto/package.py45
-rw-r--r--var/spack/repos/builtin/packages/xinit/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xinput/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xkbcomp/package.py53
-rw-r--r--var/spack/repos/builtin/packages/xkbdata/package.py43
-rw-r--r--var/spack/repos/builtin/packages/xkbevd/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xkbprint/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xkbutils/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xkeyboard-config/package.py57
-rw-r--r--var/spack/repos/builtin/packages/xkill/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xload/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xlogo/package.py53
-rw-r--r--var/spack/repos/builtin/packages/xlsatoms/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xlsclients/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xlsfonts/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xmag/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xman/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xmessage/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xmh/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xmlto/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xmodmap/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xmore/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xorg-cf-files/package.py44
-rw-r--r--var/spack/repos/builtin/packages/xorg-docs/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xorg-gtest/package.py51
-rw-r--r--var/spack/repos/builtin/packages/xorg-server/package.py108
-rw-r--r--var/spack/repos/builtin/packages/xorg-sgml-doctools/package.py45
-rw-r--r--var/spack/repos/builtin/packages/xphelloworld/package.py54
-rw-r--r--var/spack/repos/builtin/packages/xplsprinters/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xpr/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xprehashprinterlist/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xprop/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xproto/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xproxymanagementprotocol/package.py41
-rw-r--r--var/spack/repos/builtin/packages/xrandr/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xrdb/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xrefresh/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xrootd/package.py52
-rw-r--r--var/spack/repos/builtin/packages/xrx/package.py57
-rw-r--r--var/spack/repos/builtin/packages/xscope/package.py45
-rw-r--r--var/spack/repos/builtin/packages/xsdktrilinos/package.py98
-rw-r--r--var/spack/repos/builtin/packages/xset/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xsetmode/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xsetpointer/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xsetroot/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xsm/package.py49
-rw-r--r--var/spack/repos/builtin/packages/xstdcmap/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xtrans/package.py45
-rw-r--r--var/spack/repos/builtin/packages/xtrap/package.py46
-rw-r--r--var/spack/repos/builtin/packages/xts/package.py60
-rw-r--r--var/spack/repos/builtin/packages/xvidtune/package.py50
-rw-r--r--var/spack/repos/builtin/packages/xvinfo/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xwd/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xwininfo/package.py48
-rw-r--r--var/spack/repos/builtin/packages/xwud/package.py47
-rw-r--r--var/spack/repos/builtin/packages/xz/package.py9
-rw-r--r--var/spack/repos/builtin/packages/yaml-cpp/package.py50
-rw-r--r--var/spack/repos/builtin/packages/yasm/package.py1
-rw-r--r--var/spack/repos/builtin/packages/yorick/package.py81
-rw-r--r--var/spack/repos/builtin/packages/zeromq/package.py5
-rw-r--r--var/spack/repos/builtin/packages/zfp/package.py13
-rw-r--r--var/spack/repos/builtin/packages/zlib/package.py24
-rw-r--r--var/spack/repos/builtin/packages/zoltan/package.py133
-rw-r--r--var/spack/repos/builtin/packages/zsh/package.py9
1657 files changed, 106625 insertions, 29344 deletions
diff --git a/.coveragerc b/.coveragerc
index a1271a94fc..0201a4b502 100644
--- a/.coveragerc
+++ b/.coveragerc
@@ -1,6 +1,8 @@
# -*- conf -*-
# .coveragerc to control coverage.py
[run]
+parallel = True
+concurrency = multiprocessing
branch = True
source = lib
omit =
diff --git a/.flake8 b/.flake8
index a1e2fcc1f8..023f392952 100644
--- a/.flake8
+++ b/.flake8
@@ -5,16 +5,22 @@
# rationale is.
#
# Let people line things up nicely:
+# - E129: visually indented line with same indent as next logical line
# - E221: multiple spaces before operator
-# - E241: multiple spaces after ‘,’
+# - E241: multiple spaces after ','
+# - E272: multiple spaces before keyword
+#
+# Let people use terse Python features:
+# - E731 : lambda expressions
#
# Spack allows wildcard imports:
# - F403: disable wildcard import
#
# These are required to get the package.py files to test clean.
-# - F821: undefined name (needed for cmake, configure, etc.)
-# - F999: name name be undefined or undefined from star imports.
+# - F405: `name` may be undefined, or undefined from star imports: `module`
+# - F821: undefined name `name` (needed for cmake, configure, etc.)
+# - F999: syntax error in doctest
#
[flake8]
-ignore = E221,E241,F403,F821,F999
+ignore = E129,E221,E241,E272,E731,F403,F405,F821,F999
max-line-length = 79
diff --git a/.gitignore b/.gitignore
index 643e5d9b03..1a95d49377 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,13 +1,24 @@
+/db
/var/spack/stage
+/var/spack/cache
+/var/spack/repos/*/index.yaml
+/var/spack/repos/*/lock
*.pyc
-/opt/
+/opt
*~
.DS_Store
.idea
-/etc/spack/*
+/etc/spack/licenses
+/etc/spack/*.yaml
/etc/spackconfig
/share/spack/dotkit
/share/spack/modules
+/share/spack/lmod
/TAGS
+*.swp
/htmlcov
.coverage
+#*
+.#*
+/.cache
+/bin/spackc
diff --git a/.mailmap b/.mailmap
index 39ec183241..df0a82a197 100644
--- a/.mailmap
+++ b/.mailmap
@@ -1,20 +1,56 @@
-Todd Gamblin <tgamblin@llnl.gov> George Todd Gamblin <gamblin2@llnl.gov>
-Todd Gamblin <tgamblin@llnl.gov> Todd Gamblin <gamblin2@llnl.gov>
-Adam Moody <moody20@llnl.gov> Adam T. Moody <moody20@llnl.gov>
-Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Gimenez <alfredo.gimenez@gmail.com>
-David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra324.llnl.gov>
-David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra648.llnl.gov>
-Kevin Brandstatter <kjbrandstatter@gmail.com> Kevin Brandstatter <kbrandst@hawk.iit.edu>
-Luc Jaulmes <luc.jaulmes@bsc.es> Luc Jaulmes <jaulmes1@llnl.gov>
-Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov>
-Tom Scogland <tscogland@llnl.gov> Tom Scogland <scogland1@llnl.gov>
-Tom Scogland <tscogland@llnl.gov> Tom Scogland <tom.scogland@gmail.com>
-Joachim Protze <protze@rz.rwth-aachen.de> jprotze <protze@rz.rwth-aachen.de>
-Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@surface86.llnl.gov>
-Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab687.llnl.gov>
-Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab690.llnl.gov>
-Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@catalyst159.llnl.gov>
-Gregory L. Lee <lee218@llnl.gov> Gregory Lee <lee218@llnl.gov>
-Massimiliano Culpo <massimiliano.culpo@epfl.ch> Massimiliano Culpo <massimiliano.culpo@googlemail.com>
-Massimiliano Culpo <massimiliano.culpo@epfl.ch> alalazo <massimiliano.culpo@googlemail.com>
-Mark Miller <miller86@llnl.gov> miller86 <miller86@llnl.gov>
+Abhinav Bhatele <bhatele@llnl.gov> Abhinav Bhatele <bhatele@gmail.com>
+Adam Moody <moody20@llnl.gov> Adam T. Moody <moody20@llnl.gov>
+Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Gimenez <alfredo.gimenez@gmail.com>
+Alfredo Gimenez <gimenez1@llnl.gov> Alfredo Adolfo Gimenez <alfredo.gimenez@gmail.com>
+Andrew Williams <williamsa89@cardiff.ac.uk> Andrew Williams <andrew@alshain.org.uk>
+Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@gmail.com>
+Ben Boeckel <ben.boeckel@kitware.com> Ben Boeckel <mathstuf@users.noreply.github.com>
+Benedikt Hegner <hegner@cern.ch> Benedikt Hegner <benedikt.hegner@cern.ch>
+Brett Viren <bv@bnl.gov> Brett Viren <brett.viren@gmail.com>
+David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra324.llnl.gov>
+David Boehme <boehme3@llnl.gov> David Boehme <boehme3@sierra648.llnl.gov>
+David Poliakoff <poliakoff1@llnl.gov> David Poliakoff <david.poliakoff@gmail.com>
+Dhanannjay Deo <dhanannjay.deo@kitware.com> Dhanannjay 'Djay' Deo <dhanannjay.deo@kitware.com>
+Elizabeth Fischer <elizabeth.fischer@columbia.edu> Elizabeth F <elizabeth.fischer@columbia.edu>
+Elizabeth Fischer <elizabeth.fischer@columbia.edu> Elizabeth F <rpf2116@columbia.edu>
+Elizabeth Fischer <elizabeth.fischer@columbia.edu> Elizabeth Fischer <rpf2116@columbia.edu>
+Elizabeth Fischer <elizabeth.fischer@columbia.edu> citibeth <rpf2116@columbia.edu>
+Geoffrey Oxberry <oxberry1@llnl.gov> Geoffrey Oxberry <goxberry@gmail.com>
+Glenn Johnson <glenn-johnson@uiowa.edu> Glenn Johnson <gjohnson@argon-ohpc.hpc.uiowa.edu>
+Glenn Johnson <glenn-johnson@uiowa.edu> Glenn Johnson <glennpj@gmail.com>
+Gregory Becker <becker33@llnl.gov> Gregory Becker <becker33.llnl.gov>
+Gregory Becker <becker33@llnl.gov> becker33 <becker33.llnl.gov>
+Gregory Becker <becker33@llnl.gov> becker33 <becker33@llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Greg Lee <lee218@llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab687.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@cab690.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@catalyst159.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory L. Lee <lee218@surface86.llnl.gov>
+Gregory L. Lee <lee218@llnl.gov> Gregory Lee <lee218@llnl.gov>
+Ian Lee <lee1001@llnl.gov> Ian Lee <IanLee1521@gmail.com>
+James Wynne III <wynnejr@ornl.gov> James Riley Wynne III <wynnejr@ornl.gov>
+James Wynne III <wynnejr@ornl.gov> James Wynne III <wynnejr@gpujake.com>
+Joachim Protze <protze@rz.rwth-aachen.de> jprotze <protze@rz.rwth-aachen.de>
+Kelly (KT) Thompson <kgt@lanl.gov> <kellyt@MENE.localdomain>
+Kelly (KT) Thompson <kgt@lanl.gov> Kelly Thompson <KineticTheory@users.noreply.github.com>
+Kevin Brandstatter <kjbrandstatter@gmail.com> Kevin Brandstatter <kbrandst@hawk.iit.edu>
+Luc Jaulmes <luc.jaulmes@bsc.es> Luc Jaulmes <jaulmes1@llnl.gov>
+Mario Melara <maamelara@gmail.com> Mario Melara <mamelara@genepool1.nersc.gov>
+Mark Miller <miller86@llnl.gov> miller86 <miller86@llnl.gov>
+Massimiliano Culpo <massimiliano.culpo@epfl.ch> Massimiliano Culpo <massimiliano.culpo@googlemail.com>
+Massimiliano Culpo <massimiliano.culpo@epfl.ch> alalazo <massimiliano.culpo@googlemail.com>
+Mayeul d'Avezac <m.davezac@ucl.ac.uk> Mayeul d'Avezac <mdavezac@gmail.com>
+Mitchell Devlin <mitchell.r.devlin@gmail.com> Mitchell Devlin <devlin@blogin4.lcrc.anl.gov>
+Nicolas Richart <nicolas.richart@epfl.ch> Nicolas <nrichart@users.noreply.github.com>
+Nicolas Richart <nicolas.richart@epfl.ch> Nicolas Richart <nrichart@users.noreply.github.com>
+Peter Scheibel <scheibel1@llnl.gov> scheibelp <scheibel1@llnl.gov>
+Robert D. French <frenchrd@ornl.gov> Robert D. French <robert@robertdfrench.me>
+Robert D. French <frenchrd@ornl.gov> Robert.French <frenchrd@ornl.gov>
+Robert D. French <frenchrd@ornl.gov> robertdfrench <frenchrd@ornl.gov>
+Saravan Pantham <saravan.pantham@gmail.com> Saravan Pantham <pantham1@surface86.llnl.gov>
+Stephen Herbein <sherbein@udel.edu> Stephen Herbein <stephen272@gmail.com>
+Todd Gamblin <tgamblin@llnl.gov> George Todd Gamblin <gamblin2@llnl.gov>
+Todd Gamblin <tgamblin@llnl.gov> Todd Gamblin <gamblin2@llnl.gov>
+Tom Scogland <tscogland@llnl.gov> Tom Scogland <scogland1@llnl.gov>
+Tom Scogland <tscogland@llnl.gov> Tom Scogland <tom.scogland@gmail.com>
+Tzanio Kolev <tzanio@llnl.gov> Tzanio <tzanio@llnl.gov>
diff --git a/.travis.yml b/.travis.yml
index 904143a00f..0a9a118b73 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -1,17 +1,72 @@
+#=============================================================================
+# Project settings
+#=============================================================================
language: python
+
+# Only build master and develop on push; do not build every branch.
+branches:
+ only:
+ - master
+ - develop
+ - /^releases\/.*$/
+
+#=============================================================================
+# Build matrix
+#=============================================================================
python:
- - "2.6"
- - "2.7"
+ - 2.6
+ - 2.7
+
+env:
+ - TEST_SUITE=unit
+ - TEST_SUITE=flake8
+ - TEST_SUITE=doc
+matrix:
+ # Flake8 and Sphinx no longer support Python 2.6, and one run is enough.
+ exclude:
+ - python: 2.6
+ env: TEST_SUITE=flake8
+ - python: 2.6
+ env: TEST_SUITE=doc
+ # Explicitly include an OS X build with homebrew's python.
+ # Works around Python issues on Travis for OSX, described here:
+ # http://blog.fizyk.net.pl/blog/running-python-tests-on-traviss-osx-workers.html
+ include:
+ - os: osx
+ language: generic
+ env: TEST_SUITE=unit
+
+#=============================================================================
+# Environment
+#=============================================================================
# Use new Travis infrastructure (Docker can't sudo yet)
sudo: false
-# Install coveralls to obtain code coverage
-install:
- - "pip install coveralls"
- - "pip install flake8"
+# Docs need graphviz to build
+addons:
+ apt:
+ packages:
+ - gfortran
+ - graphviz
+ - libyaml-dev
+# Work around Travis's lack of support for Python on OSX
before_install:
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew update; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions python > /dev/null || brew install python; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then brew ls --versions gcc > /dev/null || brew install gcc; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then virtualenv venv; fi
+ - if [[ "$TRAVIS_OS_NAME" == "osx" ]]; then source venv/bin/activate; fi
+
+# Install various dependencies
+install:
+ - pip install --upgrade coveralls
+ - pip install --upgrade flake8
+ - pip install --upgrade sphinx
+ - pip install --upgrade mercurial
+
+before_script:
# Need this for the git tests to succeed.
- git config --global user.email "spack@example.com"
- git config --global user.name "Test User"
@@ -19,25 +74,19 @@ before_install:
# Need this to be able to compute the list of changed files
- git fetch origin develop:develop
-script:
- # Regular spack setup and tests
- - . share/spack/setup-env.sh
- - spack compilers
- - spack config get compilers
- - spack install -v libdwarf
-
- # Run unit tests with code coverage
- - coverage run bin/spack test
-
- # Run flake8 code style checks.
- - share/spack/qa/run-flake8
+#=============================================================================
+# Building
+#=============================================================================
+script: share/spack/qa/run-$TEST_SUITE-tests
after_success:
- - coveralls
+ - if [[ $TEST_SUITE == unit && $TRAVIS_PYTHON_VERSION == 2.7 && $TRAVIS_OS_NAME == "linux" ]]; then coveralls; fi
+#=============================================================================
+# Notifications
+#=============================================================================
notifications:
email:
- recipients:
- - tgamblin@llnl.gov
+ recipients: tgamblin@llnl.gov
on_success: change
on_failure: always
diff --git a/README.md b/README.md
index fe00e2af27..9d005605eb 100644
--- a/README.md
+++ b/README.md
@@ -20,7 +20,7 @@ written in pure Python, and specs allow package authors to write a
single build script for many different builds of the same package.
See the
-[Feature Overview](http://software.llnl.gov/spack/features.html)
+[Feature Overview](http://spack.readthedocs.io/en/latest/features.html)
for examples and highlights.
To install spack and install your first package:
@@ -32,9 +32,12 @@ To install spack and install your first package:
Documentation
----------------
-[**Full documentation**](http://software.llnl.gov/spack) for Spack is
+[**Full documentation**](http://spack.readthedocs.io/) for Spack is
the first place to look.
+We've also got a [**Spack 101 Tutorial**](http://spack.readthedocs.io/en/latest/tutorial_sc16.html),
+so you can learn Spack yourself, or teach users at your own site.
+
See also:
* [Technical paper](http://www.computer.org/csdl/proceedings/sc/2015/3723/00/2807623.pdf) and
[slides](https://tgamblin.github.io/files/Gamblin-Spack-SC15-Talk.pdf) on Spack's design and implementation.
@@ -58,17 +61,18 @@ can join it here:
### Contributions
-At the moment, contributing to Spack is relatively simple. Just send us
-a [pull request](https://help.github.com/articles/using-pull-requests/).
+Contributing to Spack is relatively easy. Just send us a
+[pull request](https://help.github.com/articles/using-pull-requests/).
When you send your request, make ``develop`` the destination branch on the
[Spack repository](https://github.com/LLNL/spack).
-Your contribution will need to pass all the tests run by the `spack test`
-command, as well as the formatting checks in `share/spack/qa/run-flake8`.
-You should run both of these before submitting your pull request, to
-ensure that the online checks succeed.
+Your PR must pass Spack's unit tests and documentation tests, and must be
+[PEP 8](https://www.python.org/dev/peps/pep-0008/) compliant.
+We enforce these guidelines with [Travis CI](https://travis-ci.org/LLNL/spack).
+To run these tests locally, and for helpful tips on git, see our
+[Contribution Guide](http://spack.readthedocs.io/en/latest/contribution_guide.html).
-Spack is using a rough approximation of the [Git
+Spack uses a rough approximation of the [Git
Flow](http://nvie.com/posts/a-successful-git-branching-model/)
branching model. The ``develop`` branch contains the latest
contributions, and ``master`` is always tagged and points to the
diff --git a/bin/sbang b/bin/sbang
index f6b6d35e8a..e71074b330 100755
--- a/bin/sbang
+++ b/bin/sbang
@@ -79,6 +79,15 @@
# Obviously, for this to work, `sbang` needs to have a short enough
# path that *it* will run without hitting OS limits.
#
+# For Lua scripts, the second line can't start with #!, as # is not
+# the comment character in Lua (even though Lua ignores #! on the
+# *first* line of a script). So, instrument a Lua script like this,
+# using -- instead of # on the second line:
+#
+# 1 #!/bin/bash /path/to/sbang
+# 2 --!/long/path/to/lua with arguments
+# 3
+# 4 print "success!"
#
# How it works
# -----------------------------
@@ -95,13 +104,19 @@ lines=0
while read line && ((lines < 2)) ; do
if [[ "$line" = '#!'* ]]; then
interpreter="${line#\#!}"
+ elif [[ "$line" = '--!'*lua* ]]; then
+ interpreter="${line#--!}"
fi
lines=$((lines+1))
done < "$script"
# Invoke any interpreter found, or raise an error if none was found.
-if [ -n "$interpreter" ]; then
- exec $interpreter "$@"
+if [[ -n "$interpreter" ]]; then
+ if [[ "${interpreter##*/}" = "perl" ]]; then
+ exec $interpreter -x "$@"
+ else
+ exec $interpreter "$@"
+ fi
else
echo "error: sbang found no interpreter in $script"
exit 1
diff --git a/bin/spack b/bin/spack
index 3544feb10a..66bebe57e7 100755
--- a/bin/spack
+++ b/bin/spack
@@ -1,4 +1,5 @@
#!/usr/bin/env python
+# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -24,11 +25,13 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-if not sys.version_info[:2] >= (2,6):
+if (sys.version_info[0] > 2) or (sys.version_info[:2] < (2, 6)):
v_info = sys.version_info[:3]
- sys.exit("Spack requires Python 2.6 or higher. This is Python %d.%d.%d." % v_info)
+ sys.exit("Spack requires Python 2.6 or 2.7. "
+ "This is Python %d.%d.%d." % v_info)
import os
+import inspect
# Find spack's location and its prefix.
SPACK_FILE = os.path.realpath(os.path.expanduser(__file__))
@@ -38,31 +41,34 @@ SPACK_PREFIX = os.path.dirname(os.path.dirname(SPACK_FILE))
# Allow spack libs to be imported in our scripts
SPACK_LIB_PATH = os.path.join(SPACK_PREFIX, "lib", "spack")
sys.path.insert(0, SPACK_LIB_PATH)
+
+# Add external libs
SPACK_EXTERNAL_LIBS = os.path.join(SPACK_LIB_PATH, "external")
sys.path.insert(0, SPACK_EXTERNAL_LIBS)
-import warnings
-# Avoid warnings when nose is installed with the python exe being used to run
-# spack. Note this must be done after Spack's external libs directory is added
-# to sys.path.
-with warnings.catch_warnings():
- warnings.filterwarnings("ignore", ".*nose was already imported")
- import nose
-
# Quick and dirty check to clean orphaned .pyc files left over from
# previous revisions. These files were present in earlier versions of
# Spack, were removed, but shadow system modules that Spack still
# imports. If we leave them, Spack will fail in mysterious ways.
# TODO: more elegant solution for orphaned pyc files.
-orphaned_pyc_files = [os.path.join(SPACK_EXTERNAL_LIBS, n)
- for n in ('functools.pyc', 'ordereddict.pyc')]
+orphaned_pyc_files = [
+ os.path.join(SPACK_EXTERNAL_LIBS, 'functools.pyc'),
+ os.path.join(SPACK_EXTERNAL_LIBS, 'ordereddict.pyc'),
+ os.path.join(SPACK_LIB_PATH, 'spack', 'platforms', 'cray_xc.pyc'),
+ os.path.join(SPACK_LIB_PATH, 'spack', 'cmd', 'package-list.pyc'),
+ os.path.join(SPACK_LIB_PATH, 'spack', 'cmd', 'test-install.pyc'),
+ os.path.join(SPACK_LIB_PATH, 'spack', 'cmd', 'url-parse.pyc'),
+ os.path.join(SPACK_LIB_PATH, 'spack', 'test', 'yaml.pyc')
+]
+
for pyc_file in orphaned_pyc_files:
if not os.path.exists(pyc_file):
continue
try:
os.remove(pyc_file)
except OSError as e:
- print "WARNING: Spack may fail mysteriously. Couldn't remove orphaned .pyc file: %s" % pyc_file
+ print ("WARNING: Spack may fail mysteriously. "
+ "Couldn't remove orphaned .pyc file: %s" % pyc_file)
# If there is no working directory, use the spack prefix.
try:
@@ -77,7 +83,7 @@ import llnl.util.tty as tty
from llnl.util.tty.color import *
import spack
from spack.error import SpackError
-from external import argparse
+import argparse
# Command parsing
parser = argparse.ArgumentParser(
@@ -107,6 +113,8 @@ parser.add_argument('-p', '--profile', action='store_true',
help="Profile execution using cProfile.")
parser.add_argument('-v', '--verbose', action='store_true',
help="Print additional output during builds")
+parser.add_argument('-s', '--stacktrace', action='store_true',
+ help="Add stacktrace information to all printed statements")
parser.add_argument('-V', '--version', action='version',
version="%s" % spack.spack_version)
@@ -114,30 +122,29 @@ parser.add_argument('-V', '--version', action='version',
# subparser for setup.
subparsers = parser.add_subparsers(metavar='SUBCOMMAND', dest="command")
+
import spack.cmd
for cmd in spack.cmd.commands:
module = spack.cmd.get_module(cmd)
- subparser = subparsers.add_parser(cmd, help=module.description)
+ cmd_name = cmd.replace('_', '-')
+ subparser = subparsers.add_parser(cmd_name, help=module.description)
module.setup_parser(subparser)
-# Just print help and exit if run with no arguments at all
-if len(sys.argv) == 1:
- parser.print_help()
- sys.exit(1)
-
-# actually parse the args.
-args = parser.parse_args()
-def main():
+def _main(args, unknown_args):
# Set up environment based on args.
tty.set_verbose(args.verbose)
tty.set_debug(args.debug)
+ tty.set_stacktrace(args.stacktrace)
spack.debug = args.debug
if spack.debug:
import spack.util.debug as debug
debug.register_interrupt_handler()
+ # Run any available pre-run hooks
+ spack.hooks.pre_run()
+
spack.spack_working_dir = working_dir
if args.mock:
from spack.repository import RepoPath
@@ -145,13 +152,26 @@ def main():
# If the user asked for it, don't check ssl certs.
if args.insecure:
- tty.warn("You asked for --insecure, which does not check SSL certificates.")
- spack.curl.add_default_arg('-k')
+ tty.warn("You asked for --insecure. Will NOT check SSL certificates.")
+ spack.insecure = True
# Try to load the particular command asked for and run it
- command = spack.cmd.get_command(args.command)
+ command = spack.cmd.get_command(args.command.replace('-', '_'))
+
+ # Allow commands to inject an optional argument and get unknown args
+ # if they want to handle them.
+ info = dict(inspect.getmembers(command))
+ varnames = info['__code__'].co_varnames
+ argcount = info['__code__'].co_argcount
+
+ # Actually execute the command
try:
- return_val = command(parser, args)
+ if argcount == 3 and varnames[2] == 'unknown_args':
+ return_val = command(parser, args, unknown_args)
+ else:
+ if unknown_args:
+ tty.die('unrecognized arguments: %s' % ' '.join(unknown_args))
+ return_val = command(parser, args)
except SpackError as e:
e.die()
except KeyboardInterrupt:
@@ -164,13 +184,29 @@ def main():
elif isinstance(return_val, int):
sys.exit(return_val)
else:
- tty.die("Bad return value from command %s: %s" % (args.command, return_val))
-
-if args.profile:
- import cProfile
- cProfile.run('main()', sort='tottime')
-elif args.pdb:
- import pdb
- pdb.run('main()')
-else:
- main()
+ tty.die("Bad return value from command %s: %s"
+ % (args.command, return_val))
+
+
+def main(args):
+ # Just print help and exit if run with no arguments at all
+ if len(args) == 1:
+ parser.print_help()
+ sys.exit(1)
+
+ # actually parse the args.
+ args, unknown = parser.parse_known_args()
+
+ if args.profile:
+ import cProfile
+ cProfile.runctx('_main(args, unknown)', globals(), locals(),
+ sort='time')
+ elif args.pdb:
+ import pdb
+ pdb.runctx('_main(args, unknown)', globals(), locals())
+ else:
+ _main(args, unknown)
+
+
+if __name__ == '__main__':
+ main(sys.argv)
diff --git a/etc/spack/defaults/config.yaml b/etc/spack/defaults/config.yaml
new file mode 100644
index 0000000000..4e02d0973d
--- /dev/null
+++ b/etc/spack/defaults/config.yaml
@@ -0,0 +1,68 @@
+# -------------------------------------------------------------------------
+# This is the default spack configuration file.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+# $SPACK_ROOT/etc/spack/config.yaml
+#
+# Per-user settings (overrides default and site settings):
+# ~/.spack/config.yaml
+# -------------------------------------------------------------------------
+config:
+ # This is the path to the root of the Spack install tree.
+ # You can use $spack here to refer to the root of the spack instance.
+ install_tree: $spack/opt/spack
+
+
+ # Locations where different types of modules should be installed.
+ module_roots:
+ tcl: $spack/share/spack/modules
+ lmod: $spack/share/spack/lmod
+ dotkit: $spack/share/spack/dotkit
+
+
+ # Temporary locations Spack can try to use for builds.
+ #
+ # Spack will use the first one it finds that exists and is writable.
+ # You can use $tempdir to refer to the system default temp directory
+ # (as returned by tempfile.gettempdir()).
+ #
+ # A value of $spack/var/spack/stage indicates that Spack should run
+ # builds directly inside its install directory without staging them in
+ # temporary space.
+ #
+ # The build stage can be purged with `spack purge --stage`.
+ build_stage:
+ - $tempdir
+ - /nfs/tmp2/$user
+ - $spack/var/spack/stage
+
+
+ # Cache directory for already downloaded source tarballs and archived
+ # repositories. This can be purged with `spack purge --downloads`.
+ source_cache: $spack/var/spack/cache
+
+
+ # Cache directory for miscellaneous files, like the package index.
+ # This can be purged with `spack purge --misc-cache`
+ misc_cache: ~/.spack/cache
+
+
+ # If this is false, tools like curl that use SSL will not verify
+ # certificates. (e.g., curl will use the -k option)
+ verify_ssl: true
+
+
+ # If set to true, Spack will always check checksums after downloading
+ # archives. If false, Spack skips the checksum step.
+ checksum: true
+
+
+ # If set to true, `spack install` and friends will NOT clean
+ # potentially harmful variables from the build environment. Use wisely.
+ dirty: false
diff --git a/etc/spack/defaults/darwin/packages.yaml b/etc/spack/defaults/darwin/packages.yaml
new file mode 100644
index 0000000000..24a08809db
--- /dev/null
+++ b/etc/spack/defaults/darwin/packages.yaml
@@ -0,0 +1,18 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+# $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+# ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+ all:
+ compiler: [clang, gcc, intel]
diff --git a/etc/spack/defaults/modules.yaml b/etc/spack/defaults/modules.yaml
new file mode 100644
index 0000000000..25fe2088e7
--- /dev/null
+++ b/etc/spack/defaults/modules.yaml
@@ -0,0 +1,42 @@
+# -------------------------------------------------------------------------
+# This is the default configuration for Spack's module file generation.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+# $SPACK_ROOT/etc/spack/modules.yaml
+#
+# Per-user settings (overrides default and site settings):
+# ~/.spack/modules.yaml
+# -------------------------------------------------------------------------
+modules:
+ enable:
+ - tcl
+ - dotkit
+ prefix_inspections:
+ bin:
+ - PATH
+ man:
+ - MANPATH
+ share/man:
+ - MANPATH
+ share/aclocal:
+ - ACLOCAL_PATH
+ lib:
+ - LIBRARY_PATH
+ - LD_LIBRARY_PATH
+ lib64:
+ - LIBRARY_PATH
+ - LD_LIBRARY_PATH
+ include:
+ - CPATH
+ lib/pkgconfig:
+ - PKG_CONFIG_PATH
+ lib64/pkgconfig:
+ - PKG_CONFIG_PATH
+ '':
+ - CMAKE_PREFIX_PATH
diff --git a/etc/spack/defaults/packages.yaml b/etc/spack/defaults/packages.yaml
new file mode 100644
index 0000000000..eae7752eee
--- /dev/null
+++ b/etc/spack/defaults/packages.yaml
@@ -0,0 +1,23 @@
+# -------------------------------------------------------------------------
+# This file controls default concretization preferences for Spack.
+#
+# Settings here are versioned with Spack and are intended to provide
+# sensible defaults out of the box. Spack maintainers should edit this
+# file to keep it current.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+# $SPACK_ROOT/etc/spack/packages.yaml
+#
+# Per-user settings (overrides default and site settings):
+# ~/.spack/packages.yaml
+# -------------------------------------------------------------------------
+packages:
+ all:
+ compiler: [gcc, intel, pgi, clang, xl, nag]
+ providers:
+ mpi: [openmpi, mpich]
+ blas: [openblas]
+ lapack: [openblas]
+ pil: [py-pillow]
diff --git a/etc/spack/defaults/repos.yaml b/etc/spack/defaults/repos.yaml
new file mode 100644
index 0000000000..f3e00653eb
--- /dev/null
+++ b/etc/spack/defaults/repos.yaml
@@ -0,0 +1,14 @@
+# -------------------------------------------------------------------------
+# This is the default spack repository configuration. It includes the
+# builtin spack package repository.
+#
+# Users can override these settings by editing the following files.
+#
+# Per-spack-instance settings (overrides defaults):
+# $SPACK_ROOT/etc/spack/repos.yaml
+#
+# Per-user settings (overrides default and site settings):
+# ~/.spack/repos.yaml
+# -------------------------------------------------------------------------
+repos:
+ - $spack/var/spack/repos/builtin
diff --git a/etc/spack/modules.yaml b/etc/spack/modules.yaml
deleted file mode 100644
index 99be5e7b6d..0000000000
--- a/etc/spack/modules.yaml
+++ /dev/null
@@ -1,29 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default spack module files generation configuration.
-#
-# Changes to this file will affect all users of this spack install,
-# although users can override these settings in their ~/.spack/modules.yaml.
-# -------------------------------------------------------------------------
-modules:
- enable:
- - tcl
- - dotkit
- prefix_inspections:
- bin:
- - PATH
- man:
- - MANPATH
- lib:
- - LIBRARY_PATH
- - LD_LIBRARY_PATH
- lib64:
- - LIBRARY_PATH
- - LD_LIBRARY_PATH
- include:
- - CPATH
- lib/pkgconfig:
- - PKGCONFIG
- lib64/pkgconfig:
- - PKGCONFIG
- '':
- - CMAKE_PREFIX_PATH
diff --git a/etc/spack/repos.yaml b/etc/spack/repos.yaml
deleted file mode 100644
index 2d4ff54ce6..0000000000
--- a/etc/spack/repos.yaml
+++ /dev/null
@@ -1,8 +0,0 @@
-# -------------------------------------------------------------------------
-# This is the default spack repository configuration.
-#
-# Changes to this file will affect all users of this spack install,
-# although users can override these settings in their ~/.spack/repos.yaml.
-# -------------------------------------------------------------------------
-repos:
- - $spack/var/spack/repos/builtin
diff --git a/lib/spack/docs/.gitignore b/lib/spack/docs/.gitignore
index 26c343d3eb..0bbf78cce0 100644
--- a/lib/spack/docs/.gitignore
+++ b/lib/spack/docs/.gitignore
@@ -1,4 +1,5 @@
package_list.rst
command_index.rst
spack*.rst
+modules.rst
_build
diff --git a/lib/spack/docs/Makefile b/lib/spack/docs/Makefile
index 00203b5b61..bcba423d94 100644
--- a/lib/spack/docs/Makefile
+++ b/lib/spack/docs/Makefile
@@ -2,12 +2,13 @@
#
# You can set these variables from the command line.
-SPHINXOPTS =
-SPHINXBUILD = sphinx-build
+SPHINXOPTS = -E
+JOBS ?= $(shell python -c 'import multiprocessing; print multiprocessing.cpu_count()')
+SPHINXBUILD = sphinx-build -j $(JOBS)
PAPER =
BUILDDIR = _build
-export PYTHONPATH = ../../spack
+export PYTHONPATH := ../../spack:$(PYTHONPATH)
APIDOC_FILES = spack*.rst
# Internal variables.
@@ -22,24 +23,6 @@ I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
all: html
#
-# This autogenerates a package list.
-#
-package_list:
- spack package-list > package_list.rst
-
-#
-# Generate a command index
-#
-command_index:
- cp command_index.in command_index.rst
- echo >> command_index.rst
- grep -ho '.. _spack-.*:' *rst \
- | perl -pe 's/.. _([^:]*):/ * :ref:`\1`/' \
- | sort >> command_index.rst
-
-custom_targets: package_list command_index
-
-#
# This creates a git repository and commits generated html docs.
# It then pushes the new branch into THIS repository as gh-pages.
#
@@ -58,9 +41,20 @@ gh-pages: _build/html
git push -f $$root master:gh-pages && \
rm -rf .git
+# This version makes gh-pages into a single page that redirects
+# to spack.readthedocs.io
+gh-pages-redirect:
+ root="$$(git rev-parse --show-toplevel)" && \
+ cd _gh_pages_redirect && \
+ rm -rf .git && \
+ git init && \
+ git add . && \
+ git commit -m "Spack Documentation" && \
+ git push -f $$root master:gh-pages && \
+ rm -rf .git
+
upload:
rsync -avz --rsh=ssh --delete _build/html/ cab:/usr/global/web-pages/lc/www/adept/docs/spack
- git push -f origin gh-pages
git push -f github gh-pages
apidoc:
@@ -89,10 +83,10 @@ help:
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
clean:
- -rm -f package_list.rst command_index.rst
+ -rm -f package_list.rst command_index.rst modules.rst
-rm -rf $(BUILDDIR)/* $(APIDOC_FILES)
-html: apidoc custom_targets
+html:
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
@echo
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
diff --git a/lib/spack/docs/_gh_pages_redirect/.nojekyll b/lib/spack/docs/_gh_pages_redirect/.nojekyll
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/docs/_gh_pages_redirect/.nojekyll
diff --git a/lib/spack/docs/_gh_pages_redirect/index.html b/lib/spack/docs/_gh_pages_redirect/index.html
new file mode 100644
index 0000000000..9c456abdaa
--- /dev/null
+++ b/lib/spack/docs/_gh_pages_redirect/index.html
@@ -0,0 +1,10 @@
+<html>
+ <head>
+ <meta http-equiv="refresh" content="0; url=http://spack.readthedocs.io/" />
+ </head>
+ <body>
+ <p>
+ This page has moved to <a href="http://spack.readthedocs.io/">http://spack.readthedocs.io/</a>
+ </p>
+ </body>
+</html>
diff --git a/lib/spack/docs/basic_usage.rst b/lib/spack/docs/basic_usage.rst
index 15db2f7a16..f25247579b 100644
--- a/lib/spack/docs/basic_usage.rst
+++ b/lib/spack/docs/basic_usage.rst
@@ -1,40 +1,64 @@
.. _basic-usage:
-Basic usage
-=====================
+===========
+Basic Usage
+===========
The ``spack`` command has many *subcommands*. You'll only need a
small subset of them for typical usage.
+Note that Spack colorizes output. ``less -R`` should be used with
+Spack to maintain this colorization. E.g.:
+.. code-block:: console
+
+ $ spack find | less -R
+
+It is recommended that the following be put in your ``.bashrc`` file:
+
+.. code-block:: sh
+
+ alias less='less -R'
+
+--------------------------
Listing available packages
-------------------------------
+--------------------------
To install software with Spack, you need to know what software is
available. You can see a list of available package names at the
:ref:`package-list` webpage, or using the ``spack list`` command.
-.. _spack-list:
+.. _cmd-spack-list:
+^^^^^^^^^^^^^^
``spack list``
-~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
The ``spack list`` command prints out a list of all of the packages
Spack can install:
.. command-output:: spack list
-The packages are listed by name in alphabetical order. You can also
-do wildcats searches using ``*``:
+The packages are listed by name in alphabetical order.
+A pattern to match with no wildcards, ``*`` or ``?``,
+will be treated as though it started and ended with
+``*``, so ``util`` is equivalent to ``*util*``. All patterns are treated
+as case-insensitive. You can also add the ``-d`` flag to search the description
+of the package in addition to the name. Some examples:
+
+All packages whose names contain "sql":
-.. command-output:: spack list m*
+.. command-output:: spack list sql
-.. command-output:: spack list *util*
+All packages whose names or descriptions contain "documentation":
-.. _spack-info:
+.. command-output:: spack list --search-description documentation
+.. _cmd-spack-info:
+
+^^^^^^^^^^^^^^
``spack info``
-~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
To get more information on a particular package from `spack list`, use
`spack info`. Just supply the name of a package:
@@ -49,10 +73,11 @@ viruses.
:ref:`Dependencies <sec-specs>` and :ref:`virtual dependencies
<sec-virtual-dependencies>` are described in more detail later.
-.. _spack-versions:
+.. _cmd-spack-versions:
+^^^^^^^^^^^^^^^^^^
``spack versions``
-~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^
To see *more* available versions of a package, run ``spack versions``.
For example:
@@ -69,41 +94,42 @@ by scraping it directly from package web pages. Depending on the
package and how its releases are organized, Spack may or may not be
able to find remote versions.
-
+---------------------------
Installing and uninstalling
-------------------------------
+---------------------------
-.. _spack-install:
+.. _cmd-spack-install:
+^^^^^^^^^^^^^^^^^
``spack install``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^
``spack install`` will install any package shown by ``spack list``.
For example, to install the latest version of the ``mpileaks``
package, you might type this:
-.. code-block:: sh
+.. code-block:: console
$ spack install mpileaks
-If `mpileaks` depends on other packages, Spack will install the
+If ``mpileaks`` depends on other packages, Spack will install the
dependencies first. It then fetches the ``mpileaks`` tarball, expands
it, verifies that it was downloaded without errors, builds it, and
installs it in its own directory under ``$SPACK_ROOT/opt``. You'll see
a number of messages from spack, a lot of build output, and a message
that the package is installed:
-.. code-block:: sh
+.. code-block:: console
$ spack install mpileaks
==> Installing mpileaks
- ==> mpich is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4.
- ==> callpath is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318.
- ==> adept-utils is already installed in /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da.
+ ==> mpich is already installed in ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4.
+ ==> callpath is already installed in ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318.
+ ==> adept-utils is already installed in ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da.
==> Trying to fetch from https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23/mpileaks-1.0.tar.gz
- ==> Created stage in /home/gamblin2/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7=chaos_5_x86_64_ib-59f6ad23.
+ ==> Staging archive: ~/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23/mpileaks-1.0.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/mpileaks@1.0%gcc@4.4.7 arch=linux-debian7-x86_64-59f6ad23.
==> No patches needed for mpileaks.
==> Building mpileaks.
@@ -111,18 +137,19 @@ that the packages is installed:
==> Successfully installed mpileaks.
Fetch: 2.16s. Build: 9.82s. Total: 11.98s.
- [+] /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpileaks@1.0-59f6ad23
+ [+] ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpileaks@1.0-59f6ad23
The last line, with the ``[+]``, indicates where the package is
installed.
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
Building a specific version
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack can also build *specific versions* of a package. To do this,
just add ``@`` after the package name, followed by a version:
-.. code-block:: sh
+.. code-block:: console
$ spack install mpich@3.0.4
@@ -132,75 +159,128 @@ sites, as installing a version that one user needs will not disrupt
existing installations for other users.
In addition to different versions, Spack can customize the compiler,
-compile-time options (variants), and platform (for cross compiles) of
-an installation. Spack is unique in that it can also configure the
-*dependencies* a package is built with. For example, two
-configurations of the same version of a package, one built with boost
+compile-time options (variants), compiler flags, and platform (for
+cross compiles) of an installation. Spack is unique in that it can
+also configure the *dependencies* a package is built with. For example,
+two configurations of the same version of a package, one built with boost
1.39.0, and the other version built with version 1.43.0, can coexist.
This can all be done on the command line using the *spec* syntax.
Spack calls the descriptor used to refer to a particular package
configuration a **spec**. In the commands above, ``mpileaks`` and
-``mpileaks@3.0.4``. We'll talk more about how you can use them to
-customize an installation in :ref:`sec-specs`.
+``mpileaks@3.0.4`` are both valid *specs*. We'll talk more about how
+you can use them to customize an installation in :ref:`sec-specs`.
-.. _spack-uninstall:
+.. _cmd-spack-uninstall:
+^^^^^^^^^^^^^^^^^^^
``spack uninstall``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^
-To uninstall a package, type ``spack uninstall <package>``. This will ask the user for
-confirmation, and in case will completely remove the directory in which the package was installed.
+To uninstall a package, type ``spack uninstall <package>``. This will ask
+the user for confirmation before completely removing the directory
+in which the package was installed.
-.. code-block:: sh
+.. code-block:: console
- spack uninstall mpich
+ $ spack uninstall mpich
If there are still installed packages that depend on the package to be
uninstalled, spack will refuse to uninstall it.
To uninstall a package and every package that depends on it, you may give the
-`--dependents` option.
+``--dependents`` option.
-.. code-block:: sh
+.. code-block:: console
- spack uninstall --dependents mpich
+ $ spack uninstall --dependents mpich
-will display a list of all the packages that depends on `mpich` and, upon confirmation,
-will uninstall them in the right order.
+will display a list of all the packages that depend on ``mpich`` and, upon
+confirmation, will uninstall them in the right order.
-A line like
+A command like
-.. code-block:: sh
+.. code-block:: console
- spack uninstall mpich
+ $ spack uninstall mpich
-may be ambiguous, if multiple ``mpich`` configurations are installed. For example, if both
-``mpich@3.0.2`` and ``mpich@3.1`` are installed, ``mpich`` could refer
-to either one. Because it cannot determine which one to uninstall,
-Spack will ask you either to provide a version number to remove the
-ambiguity or use the ``--all`` option to uninstall all of the matching packages.
+may be ambiguous if multiple ``mpich`` configurations are installed.
+For example, if both ``mpich@3.0.2`` and ``mpich@3.1`` are installed,
+``mpich`` could refer to either one. Because it cannot determine which
+one to uninstall, Spack will ask you either to provide a version number
+to remove the ambiguity or use the ``--all`` option to uninstall all of
+the matching packages.
-You may force uninstall a package with the `--force` option
+You may force uninstall a package with the ``--force`` option
-.. code-block:: sh
+.. code-block:: console
+
+ $ spack uninstall --force mpich
+
+but you risk breaking other installed packages. In general, it is safer to
+remove dependent packages *before* removing their dependencies or use the
+``--dependents`` option.
+
+
+.. _nondownloadable:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Non-Downloadable Tarballs
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The tarballs for some packages cannot be automatically downloaded by
+Spack. This could be for a number of reasons:
+
+#. The author requires users to manually accept a license agreement
+ before downloading (``jdk`` and ``galahad``).
+
+#. The software is proprietary and cannot be downloaded on the open
+ Internet.
+
+To install these packages, one must create a mirror and manually add
+the tarballs in question to it (see :ref:`mirrors`):
+
+#. Create a directory for the mirror. You can create this directory
+ anywhere you like; it does not have to be inside ``~/.spack``:
+
+ .. code-block:: console
+
+ $ mkdir ~/.spack/manual_mirror
+
+#. Register the mirror with Spack by creating ``~/.spack/mirrors.yaml``:
- spack uninstall --force mpich
+ .. code-block:: yaml
-but you risk breaking other installed packages. In general, it is safer to remove dependent
-packages *before* removing their dependencies or use the `--dependents` option.
+ mirrors:
+ manual: file://~/.spack/manual_mirror
+#. Put your tarballs in it. Tarballs should be named
+ ``<package>/<package>-<version>.tar.gz``. For example:
+ .. code-block:: console
+
+ $ ls -l manual_mirror/galahad
+
+ -rw-------. 1 me me 11657206 Jun 21 19:25 galahad-2.60003.tar.gz
+
+#. Install as usual:
+
+ .. code-block:: console
+
+ $ spack install galahad
+
+-------------------------
Seeing installed packages
------------------------------------
+-------------------------
We know that ``spack list`` shows you the names of available packages,
-but how do you figure out which are installed?
+but how do you figure out which are already installed?
-.. _spack-find:
+.. _cmd-spack-find:
+^^^^^^^^^^^^^^
``spack find``
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
``spack find`` shows the *specs* of installed packages. A spec is
like a name, but it has a version, compiler, architecture, and build
@@ -209,11 +289,11 @@ of the same package with different specs.
Running ``spack find`` with no arguments lists installed packages:
-.. code-block:: sh
+.. code-block:: console
$ spack find
==> 74 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
ImageMagick@6.8.9-10 libdwarf@20130729 py-dateutil@2.4.0
adept-utils@1.0 libdwarf@20130729 py-ipython@2.3.1
atk@2.14.0 libelf@0.8.12 py-matplotlib@1.4.2
@@ -239,24 +319,31 @@ Running ``spack find`` with no arguments lists installed packages:
lcms@2.6 pixman@0.32.6 xz@5.2.0
libdrm@2.4.33 py-dateutil@2.4.0 zlib@1.2.8
- -- chaos_5_x86_64_ib / gcc@4.9.2 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.9.2 --------------------------------
libelf@0.8.10 mpich@3.0.4
Packages are divided into groups according to their architecture and
compiler. Within each group, Spack tries to keep the view simple, and
only shows the version of installed packages.
+``spack find`` can filter the package list based on the package name, spec, or
+a number of properties of their installation status. For example, missing
+dependencies of a spec can be shown with ``--missing``, packages which were
+explicitly installed with ``spack install <package>`` can be singled out with
+``--explicit``, and those which have been pulled in only as dependencies with
+``--implicit``.
+
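+For example, to list only the packages that were installed explicitly,
+or only the missing dependencies of ``libdwarf`` installs (an illustrative
+sketch; the exact output depends on what is installed locally):
+
+.. code-block:: console
+
+ $ spack find --explicit
+ $ spack find --missing libdwarf
+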
In some cases, there may be different configurations of the *same*
version of a package installed. For example, there are two
-installations of of ``libdwarf@20130729`` above. We can look at them
-in more detail using ``spack find -d``, and by asking only to show
+installations of ``libdwarf@20130729`` above. We can look at them
+in more detail using ``spack find --deps``, and by asking only to show
``libdwarf`` packages:
-.. code-block:: sh
+.. code-block:: console
$ spack find --deps libdwarf
==> 2 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
^libelf@0.8.12
libdwarf@20130729-b52fac98
@@ -266,202 +353,79 @@ Now we see that the two instances of ``libdwarf`` depend on
*different* versions of ``libelf``: 0.8.12 and 0.8.13. This view can
become complicated for packages with many dependencies. If you just
want to know whether two packages' dependencies differ, you can use
-``spack find -l``:
+``spack find --long``:
-.. code-block:: sh
+.. code-block:: console
- $ spack find -l libdwarf
+ $ spack find --long libdwarf
==> 2 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962 libdwarf@20130729-b52fac98
-Now the ``libwarf`` installs have hashes after their names. These are
+Now the ``libdwarf`` installs have hashes after their names. These are
hashes over all of the dependencies of each package. If the hashes
are the same, then the packages have the same dependency configuration.
If you want to know the path where each package is installed, you can
-use ``spack find -p``:
+use ``spack find --paths``:
-.. code-block:: sh
+.. code-block:: console
- $ spack find -p
+ $ spack find --paths
==> 74 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- ImageMagick@6.8.9-10 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
- adept-utils@1.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/adept-utils@1.0-5adef8da
- atk@2.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/atk@2.14.0-3d09ac09
- boost@1.55.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/boost@1.55.0
- bzip2@1.0.6 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/bzip2@1.0.6
- cairo@1.14.0 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/cairo@1.14.0-fcc2ab44
- callpath@1.0.2 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/callpath@1.0.2-5dce4318
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ ImageMagick@6.8.9-10 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/ImageMagick@6.8.9-10-4df950dd
+ adept-utils@1.0 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/adept-utils@1.0-5adef8da
+ atk@2.14.0 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/atk@2.14.0-3d09ac09
+ boost@1.55.0 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/boost@1.55.0
+ bzip2@1.0.6 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/bzip2@1.0.6
+ cairo@1.14.0 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/cairo@1.14.0-fcc2ab44
+ callpath@1.0.2 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/callpath@1.0.2-5dce4318
...
And, finally, you can restrict your search to a particular package
by supplying its name:
-.. code-block:: sh
+.. code-block:: console
- $ spack find -p libelf
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- libelf@0.8.11 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.11
- libelf@0.8.12 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.12
- libelf@0.8.13 /home/gamblin2/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/libelf@0.8.13
+ $ spack find --paths libelf
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ libelf@0.8.11 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.11
+ libelf@0.8.12 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.12
+ libelf@0.8.13 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/libelf@0.8.13
``spack find`` actually does a lot more than this. You can use
*specs* to query for specific configurations and builds of each
package. If you want to find only libelf versions greater than version
0.8.12, you could say:
-.. code-block:: sh
+.. code-block:: console
$ spack find libelf@0.8.12:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libelf@0.8.12 libelf@0.8.13
Finding just the versions of libdwarf built with a particular version
of libelf would look like this:
-.. code-block:: sh
+.. code-block:: console
- $ spack find -l libdwarf ^libelf@0.8.12
+ $ spack find --long libdwarf ^libelf@0.8.12
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
libdwarf@20130729-d9b90962
-The full spec syntax is discussed in detail in :ref:`sec-specs`.
-
-
-Compiler configuration
------------------------------------
-
-Spack has the ability to build packages with multiple compilers and
-compiler versions. Spack searches for compilers on your machine
-automatically the first time it is run. It does this by inspecting
-your path.
-
-.. _spack-compilers:
-
-``spack compilers``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-You can see which compilers spack has found by running ``spack
-compilers`` or ``spack compiler list``::
-
- $ spack compilers
- ==> Available compilers
- -- gcc ---------------------------------------------------------
- gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7
- gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2
- -- intel -------------------------------------------------------
- intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0
- intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1
- intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1
- intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
- -- clang -------------------------------------------------------
- clang@3.4 clang@3.3 clang@3.2 clang@3.1
- -- pgi ---------------------------------------------------------
- pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1
- pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3
- pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6
-
-Any of these compilers can be used to build Spack packages. More on
-how this is done is in :ref:`sec-specs`.
-
-.. _spack-compiler-add:
-
-``spack compiler add``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-An alias for ``spack compiler find``.
-
-.. _spack-compiler-find:
-
-``spack compiler find``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-If you do not see a compiler in this list, but you want to use it with
-Spack, you can simply run ``spack compiler find`` with the path to
-where the compiler is installed. For example::
-
- $ spack compiler find /usr/local/tools/ic-13.0.079
- ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
- intel@13.0.079
-
-Or you can run ``spack compiler find`` with no arguments to force
-auto-detection. This is useful if you do not know where compilers are
-installed, but you know that new compilers have been added to your
-``PATH``. For example, using dotkit, you might do this::
-
- $ module load gcc-4.9.0
- $ spack compiler find
- ==> Added 1 new compiler to /Users/gamblin2/.spack/compilers.yaml
- gcc@4.9.0
-
-This loads the environment module for gcc-4.9.0 to add it to
-``PATH``, and then it adds the compiler to Spack.
-
-.. _spack-compiler-info:
-
-``spack compiler info``
-~~~~~~~~~~~~~~~~~~~~~~~
-
-If you want to see specifics on a particular compiler, you can run
-``spack compiler info`` on it::
-
- $ spack compiler info intel@15
- intel@15.0.0:
- cc = /usr/local/bin/icc-15.0.090
- cxx = /usr/local/bin/icpc-15.0.090
- f77 = /usr/local/bin/ifort-15.0.090
- fc = /usr/local/bin/ifort-15.0.090
-
-This shows which C, C++, and Fortran compilers were detected by Spack.
-Notice also that we didn't have to be too specific about the
-version. We just said ``intel@15``, and information about the only
-matching Intel compiler was displayed.
-
-
-Manual compiler configuration
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If auto-detection fails, you can manually configure a compiler by
-editing your ``~/.spack/compilers.yaml`` file. You can do this by running
-``spack config edit compilers``, which will open the file in your ``$EDITOR``.
-
-Each compiler configuration in the file looks like this::
-
- ...
- chaos_5_x86_64_ib:
- ...
- intel@15.0.0:
- cc: /usr/local/bin/icc-15.0.024-beta
- cxx: /usr/local/bin/icpc-15.0.024-beta
- f77: /usr/local/bin/ifort-15.0.024-beta
- fc: /usr/local/bin/ifort-15.0.024-beta
- ...
-
-The chaos_5_x86_64_ib string is an architecture string, and multiple
-compilers can be listed underneath an architecture. The architecture
-string may be replaced with the string 'all' to signify compilers that
-work on all architectures.
-
-For compilers, like ``clang``, that do not support Fortran, put
-``None`` for ``f77`` and ``fc``::
-
- clang@3.3svn:
- cc: /usr/bin/clang
- cxx: /usr/bin/clang++
- f77: None
- fc: None
-
-Once you save the file, the configured compilers will show up in the
-list displayed by ``spack compilers``.
+We can also search for packages that have a certain attribute. For example,
+``spack find libdwarf +debug`` will show only installations of libdwarf
+with the 'debug' compile-time option enabled.
+The full spec syntax is discussed in detail in :ref:`sec-specs`.
.. _sec-specs:
+--------------------
Specs & dependencies
--------------------------
+--------------------
We know that ``spack install``, ``spack uninstall``, and other
commands take a package name with an optional version specifier. In
@@ -472,9 +436,11 @@ specify the compiler, compiler version, architecture, compile options,
and dependency options for a build. In this section, we'll go over
the full syntax of specs.
-Here is an example of a much longer spec than we've seen thus far::
+Here is an example of a much longer spec than we've seen thus far:
- mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt =bgqos_0 ^callpath @1.1 %gcc@4.7.2
+.. code-block:: none
+
+ mpileaks @1.2:1.4 %gcc@4.7.5 +debug -qt arch=bgq_os ^callpath @1.1 %gcc@4.7.2
If provided to ``spack install``, this will install the ``mpileaks``
library at some version between ``1.2`` and ``1.4`` (inclusive),
@@ -492,8 +458,13 @@ More formally, a spec consists of the following pieces:
* ``%`` Optional compiler specifier, with an optional compiler version
(``gcc`` or ``gcc@4.7.3``)
* ``+`` or ``-`` or ``~`` Optional variant specifiers (``+debug``,
- ``-qt``, or ``~qt``)
-* ``=`` Optional architecture specifier (``bgqos_0``)
+ ``-qt``, or ``~qt``) for boolean variants
+* ``name=<value>`` Optional variant specifiers that are not restricted to
+ boolean variants
+* ``name=<value>`` Optional compiler flag specifiers. Valid flag names are
+ ``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``, and ``ldlibs``.
+* ``target=<value> os=<value>`` Optional architecture specifier
+ (``target=haswell os=CNL10``)
* ``^`` Dependency specs (``^callpath@1.1``)
There are two things to notice here. The first is that specs are
@@ -546,7 +517,6 @@ some *other* version of ``mpich``. In general, such a configuration
would likely behave unexpectedly at runtime, and Spack enforces this
to ensure a consistent runtime environment.
-
The point of specs is to abstract this full DAG from Spack users. If
a user does not care about the DAG at all, she can refer to mpileaks
by simply writing ``mpileaks``. If she knows that ``mpileaks``
@@ -562,7 +532,7 @@ of dependencies within a spec is that they appear *after* the root
package. For example, these two specs represent exactly the same
configuration:
-.. code-block:: sh
+.. code-block:: none
mpileaks ^callpath@1.0 ^libelf@0.8.3
mpileaks ^libelf@0.8.3 ^callpath@1.0
@@ -573,7 +543,7 @@ compilers, variants, and architectures just like any other spec.
Specifiers are associated with the nearest package name to their left.
For example, above, ``@1.1`` and ``%gcc@4.7.2`` associates with the
``callpath`` package, while ``@1.2:1.4``, ``%gcc@4.7.5``, ``+debug``,
-``-qt``, and ``=bgqos_0`` all associate with the ``mpileaks`` package.
+``-qt``, and ``target=haswell os=CNL10`` all associate with the ``mpileaks`` package.
In the diagram above, ``mpileaks`` depends on ``mpich`` with an
unspecified version, but packages can depend on other packages with
@@ -583,8 +553,9 @@ could depend on ``mpich@1.2:`` if it can only build with version
Below are more details about the specifiers that you can add to specs.
+^^^^^^^^^^^^^^^^^
Version specifier
-~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^
A version specifier comes somewhere after a package name and starts
with ``@``. It can be a single version, e.g. ``@1.0``, ``@3``, or
@@ -606,9 +577,9 @@ policies set for the particular Spack installation.
Details about how versions are compared and how Spack determines if
one version is less than another are discussed in the developer guide.
-
+^^^^^^^^^^^^^^^^^^
Compiler specifier
-~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^
A compiler specifier comes somewhere after a package name and starts
with ``%``. It tells Spack what compiler(s) a particular package
@@ -625,26 +596,29 @@ name or compiler specifier to their left in the spec.
If the compiler spec is omitted, Spack will choose a default compiler
based on site policies.
-
+^^^^^^^^
Variants
-~~~~~~~~~~~~~~~~~~~~~~~
-
-.. Note::
-
- Variants are not yet supported, but will be in the next Spack
- release (0.9), due in Q2 2015.
-
-Variants are named options associated with a particular package, and
-they can be turned on or off. For example, above, supplying
-``+debug`` causes ``mpileaks`` to be built with debug flags. The
-names of particular variants available for a package depend on what
-was provided by the package author. ``spack info <package>`` will
+^^^^^^^^
+
+Variants are named options associated with a particular package. They are
+optional, as each package must provide default values for each variant it
+makes available. Variants can be specified using
+a flexible parameter syntax ``name=<value>``. For example,
+``spack install libelf debug=True`` will install libelf built with debug
+flags. The names of particular variants available for a package depend on
+what was provided by the package author. ``spack info <package>`` will
provide information on what build variants are available.
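+
+As a minimal sketch of this workflow (``libelf``'s available variants may
+differ on your system; check ``spack info libelf`` first):
+
+.. code-block:: console
+
+ $ spack info libelf
+ $ spack install libelf debug=True
+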
-Depending on the package a variant may be on or off by default. For
-``mpileaks`` here, ``debug`` is off by default, and we turned it on
-with ``+debug``. If a package is on by default you can turn it off by
-either adding ``-name`` or ``~name`` to the spec.
+For compatibility with earlier versions, variants which happen to be
+boolean in nature can be specified by a syntax that represents turning
+options on and off. For example, in the previous spec we could have
+supplied ``libelf +debug`` to the same effect, enabling the debug
+compile-time option for the libelf package.
+
+Depending on the package, a variant may have any default value. For
+``libelf`` here, ``debug`` is ``False`` by default, and we turned it on
+with ``debug=True`` or ``+debug``. If a variant is ``True`` by default,
+you can turn it off by either adding ``-name`` or ``~name`` to the spec.
There are two syntaxes here because, depending on context, ``~`` and
``-`` may mean different things. In most shells, the following will
@@ -656,7 +630,7 @@ result in the shell performing home directory substitution:
mpileaks~debug # use this instead
If there is a user called ``debug``, the ``~`` will be incorrectly
-expanded. In this situation, you would want to write ``mpileaks
+expanded. In this situation, you would want to write ``libelf
-debug``. However, ``-`` can be ambiguous when included after a
package name without spaces:
@@ -671,31 +645,87 @@ package, not a request for ``mpileaks`` built without ``debug``
options. In this scenario, you should write ``mpileaks~debug`` to
avoid ambiguity.
-When spack normalizes specs, it prints them out with no spaces and
-uses only ``~`` for disabled variants. We allow ``-`` and spaces on
-the command line is provided for convenience and legibility.
+When spack normalizes specs, it prints them out with no spaces, prints
+boolean variants using the backwards-compatibility syntax, and uses only
+``~`` for disabled boolean variants. The ``-`` and spaces on the command
+line are provided for convenience and legibility.
+
+^^^^^^^^^^^^^^
+Compiler Flags
+^^^^^^^^^^^^^^
+
+Compiler flags are specified using the same syntax as non-boolean variants,
+but fulfill a different purpose. While the function of a variant is set by
+the package, compiler flags are used by the compiler wrappers to inject
+flags into the compile line of the build. Additionally, compiler flags are
+inherited by dependencies. ``spack install libdwarf cppflags="-g"`` will
+install both libdwarf and libelf with the ``-g`` flag injected into their
+compile line.
+
+Notice that the value of the compiler flags must be quoted if it
+contains any spaces. Any of ``cppflags=-O3``, ``cppflags="-O3"``,
+``cppflags='-O3'``, and ``cppflags="-O3 -fPIC"`` are acceptable, but
+``cppflags=-O3 -fPIC`` is not. Additionally, if the value of the
+compiler flags is not the last thing on the line, it must be followed
+by a space. The command ``spack install libelf cppflags="-O3"%intel``
+will be interpreted as an attempt to set ``cppflags="-O3%intel"``.
+
+The six compiler flags are injected in the order of implicit make commands
+in GNU Autotools. If all flags are set, the order is
+``$cppflags $cflags|$cxxflags $ldflags <command> $ldlibs`` for C and C++ and
+``$fflags $cppflags $ldflags <command> $ldlibs`` for Fortran.
+
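+A minimal sketch combining two flag types on one spec (the flag values
+here are arbitrary examples):
+
+.. code-block:: console
+
+ $ spack install libdwarf cppflags="-DNDEBUG" cflags="-O2 -g"
+
+Because the ``cflags`` value contains a space, it must be quoted, as
+described above.
+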
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Compiler environment variables and additional RPATHs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In the exceptional case that a compiler requires setting special environment
+variables, like an explicit library load path, these can be set in an
+extra section of the compiler configuration. The user can also specify
+additional ``RPATHs`` that the compiler will add to all executables
+generated by that compiler. This is useful for forcing certain compilers
+to RPATH their own runtime libraries, so that executables will run
+without the need to set ``LD_LIBRARY_PATH``.
+.. code-block:: yaml
-Architecture specifier
-~~~~~~~~~~~~~~~~~~~~~~~
+ compilers:
+ - compiler:
+ spec: gcc@4.9.3
+ paths:
+ cc: /opt/gcc/bin/gcc
+ cxx: /opt/gcc/bin/g++
+ f77: /opt/gcc/bin/gfortran
+ fc: /opt/gcc/bin/gfortran
+ environment:
+ set:
+ LD_LIBRARY_PATH : /opt/gcc/lib
+ extra_rpaths:
+ - /path/to/some/compiler/runtime/directory
+ - /path/to/some/other/compiler/runtime/directory
-.. Note::
+^^^^^^^^^^^^^^^^^^^^^^^
+Architecture specifiers
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The architecture can be specified by using the reserved
+words ``target`` and/or ``os`` (``target=x86-64 os=debian7``). You can also
+use the triplet form of platform, operating system and processor.
- Architecture specifiers are part of specs but are not yet
- functional. They will be in Spack version 1.0, due in Q3 2015.
+.. code-block:: console
-The architecture specifier starts with a ``=`` and also comes after
-some package name within a spec. It allows a user to specify a
-particular architecture for the package to be built. This is mostly
-used for architectures that need cross-compilation, and in most cases,
-users will not need to specify the architecture when they install a
-package.
+ $ spack install libelf arch=cray-CNL10-haswell
+Users on non-Cray systems won't have to worry about specifying the architecture.
+Spack will autodetect what kind of operating system is on your machine as well
+as the processor. For more information on how the architecture can be
+used on Cray machines, see :ref:`cray-support`.
.. _sec-virtual-dependencies:
+--------------------
Virtual dependencies
--------------------------
+--------------------
The dependence graph for ``mpileaks`` we saw above wasn't *quite*
accurate. ``mpileaks`` uses MPI, which is an interface that has many
@@ -738,27 +768,33 @@ version of a virtual package, e.g. if an application needs MPI-2
functions, it can depend on ``mpi@2:`` to indicate that it needs some
implementation that provides MPI-2 functions.
-
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Constraining virtual packages
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
When installing a package that depends on a virtual package, you can
opt to specify the particular provider you want to use, or you can let
-Spack pick. For example, if you just type this::
+Spack pick. For example, if you just type this:
- spack install mpileaks
+.. code-block:: console
+
+ $ spack install mpileaks
Then spack will pick a provider for you according to site policies.
-If you really want a particular version, say mpich, then you could
-run this instead::
+If you really want a particular version, say ``mpich``, then you could
+run this instead:
- spack install mpileaks ^mpich
+.. code-block:: console
+
+ $ spack install mpileaks ^mpich
This forces spack to use some version of ``mpich`` for its
implementation. As always, you can be even more specific and require
-a particular ``mpich`` version::
+a particular ``mpich`` version:
+
+.. code-block:: console
- spack install mpileaks ^mpich@3
+ $ spack install mpileaks ^mpich@3
The ``mpileaks`` package in particular only needs MPI-1 commands, so
any MPI implementation will do. If another package depends on
@@ -767,10 +803,29 @@ any MPI implementation will do. If another package depends on
error. Likewise, if you try to plug in some package that doesn't
provide MPI, Spack will raise an error.
-.. _spack-providers:
+^^^^^^^^^^^^^^^^^^^^^^^^
+Specifying Specs by Hash
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Complicated specs can become cumbersome to enter on the command line,
+especially when many of the qualifications are necessary to
+distinguish between similar installs, for example when using the
+``uninstall`` command. To avoid this, Spack allows you to refer to an
+existing spec by its hash. We previously discussed the spec hash that
+Spack computes. In place of a spec in any command, substitute
+``/<hash>``, where ``<hash>`` is any number of characters from the
+beginning of the spec hash. If the given hash prefix is sufficient to
+identify a unique spec, Spack will replace the reference with the spec to
+which it refers. Otherwise, it will prompt for a more qualified hash.
+
+Note that this will not work to reinstall a dependency uninstalled by
+``spack uninstall --force``.
+
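+For example, assuming ``5dce4318`` is the beginning of the hash of an
+installed spec (hashes are shown by ``spack find -l``), a sketch of
+uninstalling that spec by hash would be:
+
+.. code-block:: console
+
+   $ spack uninstall /5dce4318
+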
+.. _cmd-spack-providers:
+^^^^^^^^^^^^^^^^^^^
``spack providers``
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^
You can see what packages provide a particular virtual package using
``spack providers``. If you wanted to see what packages provide
@@ -786,467 +841,9 @@ add a version specifier to the spec:
Notice that the package versions that provide insufficient MPI
versions are now filtered out.
-.. _shell-support:
-
-Integration with module systems
--------------------------------
-
-.. note::
-
- Environment module support is currently experimental and should not
- be considered a stable feature of Spack. In particular, the
- interface and/or generated module names may change in future
- versions.
-
-Spack provides some integration with
-`Environment Modules <http://modules.sourceforge.net/>`_
-and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ to make
-it easier to use the packages it installed.
-
-
-
-Installing Environment Modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-In order to use Spack's generated environment modules, you must have
-installed the *Environment Modules* package. On many Linux
-distributions, this can be installed from the vendor's repository:
-
-.. code-block:: sh
-
- yum install environment-modules # (Fedora/RHEL/CentOS)
- apt-get install environment-modules # (Ubuntu/Debian)
-
-If your Linux distribution does not have
-Environment Modules, you can get it with Spack:
-
-.. code-block:: sh
-
- spack install environment-modules
-
-
-In this case to activate it automatically you need to add the following two
-lines to your ``.bashrc`` profile (or similar):
-
-.. code-block:: sh
-
- MODULES_HOME=`spack location -i environment-modules`
- source ${MODULES_HOME}/Modules/init/bash
-
-If you use a Unix shell other than ``bash``, modify the commands above
-accordingly and source the appropriate file in
-``${MODULES_HOME}/Modules/init/``.
-
-
-.. TODO : Add a similar section on how to install dotkit ?
-
-Spack and module systems
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-You can enable shell support by sourcing some files in the
-``/share/spack`` directory.
-
-For ``bash`` or ``ksh``, run:
-
-.. code-block:: sh
-
- . ${SPACK_ROOT}/share/spack/setup-env.sh
-
-For ``csh`` and ``tcsh`` run:
-
-.. code-block:: csh
-
- setenv SPACK_ROOT /path/to/spack
- source $SPACK_ROOT/share/spack/setup-env.csh
-
-You can put the above code in your ``.bashrc`` or ``.cshrc``, and
-Spack's shell support will be available on the command line.
-
-When you install a package with Spack, it automatically generates a module file
-that lets you add the package to your environment.
-
-Currently, Spack supports the generation of `Environment Modules
-<http://wiki.tcl.tk/12999>`_ and `Dotkit
-<https://computing.llnl.gov/?set=jobs&page=dotkit>`_. Generated
-module files for each of these systems can be found in these
-directories:
-
-.. code-block:: sh
-
- ${SPACK_ROOT}/share/spack/modules
- ${SPACK_ROOT}/share/spack/dotkit
-
-The directories are automatically added to your ``MODULEPATH`` and
-``DK_NODE`` environment variables when you enable Spack's `shell
-support <shell-support_>`_.
-
-
-Using Modules & Dotkits
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-If you have shell support enabled you should be able to run either
-``module avail`` or ``use -l spack`` to see what modules/dotkits have
-been installed. Here is sample output of those programs, showing lots
-of installed packages.
-
- .. code-block:: sh
-
- $ module avail
-
- ------- /home/gamblin2/spack/share/spack/modules/chaos_5_x86_64_ib --------
- adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
- automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
- boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
- callpath@1.0.1%gcc@4.4.7-5dce4318 mpfr@3.1.2%gcc@4.4.7
- dyninst@8.1.2%gcc@4.4.7-b040c20e mpich@3.0.4%gcc@4.4.7
- gcc@4.9.1%gcc@4.4.7-93ab98c5 mpich@3.0.4%gcc@4.9.0
- gmp@6.0.0a%gcc@4.4.7 mrnet@4.1.0%gcc@4.4.7-72b7881d
- graphlib@2.0.0%gcc@4.4.7 netgauge@2.4.6%gcc@4.9.0-27912b7b
- launchmon@1.0.1%gcc@4.4.7 stat@2.1.0%gcc@4.4.7-51101207
- libNBC@1.1.1%gcc@4.9.0-27912b7b sundials@2.5.0%gcc@4.9.0-27912b7b
- libdwarf@20130729%gcc@4.4.7-b52fac98
-
- .. code-block:: sh
-
- $ use -l spack
-
- spack ----------
- adept-utils@1.0%gcc@4.4.7-5adef8da - adept-utils @1.0
- automaded@1.0%gcc@4.4.7-d9691bb0 - automaded @1.0
- boost@1.55.0%gcc@4.4.7 - boost @1.55.0
- callpath@1.0.1%gcc@4.4.7-5dce4318 - callpath @1.0.1
- dyninst@8.1.2%gcc@4.4.7-b040c20e - dyninst @8.1.2
- gmp@6.0.0a%gcc@4.4.7 - gmp @6.0.0a
- libNBC@1.1.1%gcc@4.9.0-27912b7b - libNBC @1.1.1
- libdwarf@20130729%gcc@4.4.7-b52fac98 - libdwarf @20130729
- libelf@0.8.13%gcc@4.4.7 - libelf @0.8.13
- libelf@0.8.13%intel@15.0.0 - libelf @0.8.13
- mpc@1.0.2%gcc@4.4.7-559607f5 - mpc @1.0.2
- mpfr@3.1.2%gcc@4.4.7 - mpfr @3.1.2
- mpich@3.0.4%gcc@4.4.7 - mpich @3.0.4
- mpich@3.0.4%gcc@4.9.0 - mpich @3.0.4
- netgauge@2.4.6%gcc@4.9.0-27912b7b - netgauge @2.4.6
- sundials@2.5.0%gcc@4.9.0-27912b7b - sundials @2.5.0
-
-The names here should look familiar, they're the same ones from
-``spack find``. You *can* use the names here directly. For example,
-you could type either of these commands to load the callpath module:
-
-.. code-block:: sh
-
- use callpath@1.0.1%gcc@4.4.7-5dce4318
-
-.. code-block:: sh
-
- module load callpath@1.0.1%gcc@4.4.7-5dce4318
-
-Neither of these is particularly pretty, easy to remember, or
-easy to type. Luckily, Spack has its own interface for using modules
-and dotkits. You can use the same spec syntax you're used to:
-
- ========================= ==========================
- Environment Modules Dotkit
- ========================= ==========================
- ``spack load <spec>`` ``spack use <spec>``
- ``spack unload <spec>`` ``spack unuse <spec>``
- ========================= ==========================
-
-And you can use the same shortened names you use everywhere else in
-Spack. For example, this will add the ``mpich`` package built with
-``gcc`` to your path:
-
-.. code-block:: sh
-
- $ spack install mpich %gcc@4.4.7
-
- # ... wait for install ...
-
- $ spack use mpich %gcc@4.4.7
- Prepending: mpich@3.0.4%gcc@4.4.7 (ok)
- $ which mpicc
- ~/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/mpich@3.0.4/bin/mpicc
-
-Or, similarly with modules, you could type:
-
-.. code-block:: sh
-
- $ spack load mpich %gcc@4.4.7
-
-These commands will add appropriate directories to your ``PATH``,
-``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH``. When you no longer want to use
-a package, you can type unload or unuse similarly:
-
-.. code-block:: sh
-
- $ spack unload mpich %gcc@4.4.7 # modules
- $ spack unuse mpich %gcc@4.4.7 # dotkit
-
-.. note::
-
- These ``use``, ``unuse``, ``load``, and ``unload`` subcommands are
- only available if you have enabled Spack's shell support *and* you
- have dotkit or modules installed on your machine.
-
-Ambiguous module names
-~~~~~~~~~~~~~~~~~~~~~~~~
-
-If a spec used with load/unload or use/unuse is ambiguous (i.e. more
-than one installed package matches it), then Spack will warn you:
-
-.. code-block:: sh
-
- $ spack load libelf
- ==> Error: Multiple matches for spec libelf. Choose one:
- libelf@0.8.13%gcc@4.4.7=chaos_5_x86_64_ib
- libelf@0.8.13%intel@15.0.0=chaos_5_x86_64_ib
-
-You can either type the ``spack load`` command again with a fully
-qualified argument, or you can add just enough extra constraints to
-identify one package. For example, above, the key differentiator is
-that one ``libelf`` is built with the Intel compiler, while the other
-used ``gcc``. You could therefore just type:
-
-.. code-block:: sh
-
- $ spack load libelf %intel
-
-To identify just the one built with the Intel compiler.
-
-Module files generation and customization
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-Environment Modules and Dotkit files are generated when packages are installed,
-and are placed in the following directories under the Spack root:
-
-.. code-block:: sh
-
- ${SPACK_ROOT}/share/spack/modules
- ${SPACK_ROOT}/share/spack/dotkit
-
-The content that gets written in each module file can be customized in two ways:
-
- 1. overriding part of the ``spack.Package`` API within a ``package.py``
- 2. writing dedicated configuration files
-
-Override ``Package`` API
-^^^^^^^^^^^^^^^^^^^^^^^^
-There are currently two methods in ``spack.Package`` that may affect the content
-of module files:
-
-.. code-block:: python
-
- def setup_environment(self, spack_env, run_env):
- """Set up the compile and runtime environments for a package."""
- pass
-
-.. code-block:: python
-
- def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
- """Set up the environment of packages that depend on this one"""
- pass
-
-As briefly stated in the comments, the first method lets you customize the
-module file content for the package you are currently writing, the second
-allows for modifications to your dependees module file. In both cases one
-needs to fill ``run_env`` with the desired list of environment modifications.
-
-Example : ``builtin/packages/python/package.py``
-""""""""""""""""""""""""""""""""""""""""""""""""
-
-The ``python`` package that comes with the ``builtin`` Spack repository
-overrides ``setup_dependent_environment`` in the following way:
-
-.. code-block:: python
-
- def setup_dependent_environment(self, spack_env, run_env, extension_spec):
- # ...
- if extension_spec.package.extends(self.spec):
- run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir))
-
-to insert the appropriate ``PYTHONPATH`` modifications in the module
-files of python packages.
-
-Configuration files
-^^^^^^^^^^^^^^^^^^^
-
-Another way of modifying the content of module files is writing a
-``modules.yaml`` configuration file. Following usual Spack conventions, this
-file can be placed either at *site* or *user* scope.
-
-The default site configuration reads:
-
- .. literalinclude:: ../../../etc/spack/modules.yaml
- :language: yaml
-
-It basically inspects the installation prefixes for the
-existence of a few folders and, if they exist, it prepends a path to a given
-list of environment variables.
-
-For each module system that can be enabled a finer configuration is possible:
-
-.. code-block:: yaml
-
- modules:
- tcl:
- # contains environment modules specific customizations
- dotkit:
- # contains dotkit specific customizations
-
-The structure under the ``tcl`` and ``dotkit`` keys is almost equal, and will
-be showcased in the following by some examples.
-
-Select module files by spec constraints
-"""""""""""""""""""""""""""""""""""""""
-Using spec syntax it's possible to have different customizations for different
-groups of module files.
-
-Considering :
-
-.. code-block:: yaml
-
- modules:
- tcl:
- all: # Default addition for every package
- environment:
- set:
- BAR: 'bar'
- ^openmpi:: # A double ':' overrides previous rules
- environment:
- set:
- BAR: 'baz'
- zlib:
- environment:
- prepend_path:
- LD_LIBRARY_PATH: 'foo'
- zlib%gcc@4.8:
- environment:
- unset:
- - FOOBAR
-
-what will happen is that:
-
- - every module file will set ``BAR=bar``
- - unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``
- - any spec that satisfies ``zlib`` will additionally prepend ``foo`` to ``LD_LIBRARY_PATH``
- - any spec that satisfies ``zlib%gcc@4.8`` will additionally unset ``FOOBAR``
-
-.. note::
- Order does matter
- The modifications associated with the ``all`` keyword are always evaluated
- first, no matter where they appear in the configuration file. All the other
- spec constraints are instead evaluated top to bottom.
-
-Filter modifications out of module files
-""""""""""""""""""""""""""""""""""""""""
-
-Modifications to certain environment variables in module files are generated by
-default. Suppose you would like to avoid having ``CPATH`` and ``LIBRARY_PATH``
-modified by your dotkit modules. Then :
-
-.. code-block:: yaml
-
- modules:
- dotkit:
- all:
- filter:
- environment_blacklist: ['CPATH', 'LIBRARY_PATH'] # Exclude changes to any of these variables
-
-will generate dotkit module files that will not contain modifications to either
-``CPATH`` or ``LIBRARY_PATH`` and environment module files that instead will
-contain those modifications.
-
-Autoload dependencies
-"""""""""""""""""""""
-
-The following lines in ``modules.yaml``:
-
-.. code-block:: yaml
-
- modules:
- tcl:
- all:
- autoload: 'direct'
-
-will produce environment module files that will automatically load their direct
-dependencies.
-
-.. note::
- Allowed values for ``autoload`` statements
- Allowed values for ``autoload`` statements are either ``none``, ``direct``
- or ``all``. In ``tcl`` configuration it is possible to use the option
- ``prerequisites`` that accepts the same values and will add ``prereq``
- statements instead of automatically loading other modules.
-
-Blacklist or whitelist the generation of specific module files
-""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
-
-Sometimes it is desirable not to generate module files, a common use case being
-not providing the users with software built using the system compiler.
-
-A configuration file like:
-
-.. code-block:: yaml
-
- modules:
- tcl:
- whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist
- blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler
-
-will skip module file generation for anything that satisfies ``%gcc@4.4.7``,
-with the exception of specs that satisfy ``gcc`` or ``llvm``.
-
-Customize the naming scheme and insert conflicts
-""""""""""""""""""""""""""""""""""""""""""""""""
-
-A configuration file like:
-
-.. code-block:: yaml
-
- modules:
- tcl:
- naming_scheme: '{name}/{version}-{compiler.name}-{compiler.version}'
- all:
- conflict: ['{name}', 'intel/14.0.1']
-
-will create module files that will conflict with ``intel/14.0.1`` and with the
-base directory of the same module, effectively preventing the possibility to
-load two or more versions of the same software at the same time.
-
-.. note::
- Tokens available for the naming scheme
- currently only the tokens shown in the example are available to construct
- the naming scheme
-
-.. note::
- The ``conflict`` option is ``tcl`` specific
-
-Regenerating module files
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
-Sometimes you may need to regenerate the modules files. For example,
-if newer, fancier module support is added to Spack at some later date,
-you may want to regenerate all the modules to take advantage of these
-new features.
-
-.. _spack-module:
-
-``spack module refresh``
-""""""""""""""""""""""""
-
-Running ``spack module refresh`` will remove the
-``share/spack/modules`` and ``share/spack/dotkit`` directories, then
-regenerate all module and dotkit files from scratch:
-
-.. code-block:: sh
-
- $ spack module refresh
- ==> Regenerating tcl module files.
- ==> Regenerating dotkit module files.
-
-
-.. _extensions:
-
+---------------------------
Extensions & Python support
-------------------------------------
+---------------------------
Spack's installation model assumes that each package will live in its
own install prefix. However, certain packages are typically installed
@@ -1259,24 +856,25 @@ Spack has support for this type of installation as well. In Spack,
a package that can live inside the prefix of another package is called
an *extension*. Suppose you have Python installed like so:
-.. code-block:: sh
+.. code-block:: console
$ spack find python
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
python@2.7.8
-.. _spack-extensions:
+.. _cmd-spack-extensions:
+^^^^^^^^^^^^^^^^^^^^
``spack extensions``
-~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^
You can find extensions for your Python installation like this:
-.. code-block:: sh
+.. code-block:: console
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1288,7 +886,7 @@ You can find extensions for your Python installation like this:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
@@ -1298,19 +896,19 @@ You can find extensions for your Python installation like this:
The extensions are a subset of what's returned by ``spack list``, and
they are packages like any other. They are installed into their own
-prefixes, and you can see this with ``spack find -p``:
+prefixes, and you can see this with ``spack find --paths``:
-.. code-block:: sh
+.. code-block:: console
- $ spack find -p py-numpy
+ $ spack find --paths py-numpy
==> 1 installed packages.
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
- py-numpy@1.9.1 /g/g21/gamblin2/src/spack/opt/chaos_5_x86_64_ib/gcc@4.4.7/py-numpy@1.9.1-66733244
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
+ py-numpy@1.9.1 ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/py-numpy@1.9.1-66733244
However, even though this package is installed, you cannot use it
directly when you run ``python``:
-.. code-block:: sh
+.. code-block:: console
$ spack load python
$ python
@@ -1323,8 +921,9 @@ directly when you run ``python``:
ImportError: No module named numpy
>>>
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Extensions & Environment Modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
There are two ways to get ``numpy`` working in Python. The first is
to use :ref:`shell-support`. You can simply ``use`` or ``load`` the
@@ -1333,14 +932,14 @@ in your current shell.
For tcl modules:
-.. code-block:: sh
+.. code-block:: console
$ spack load python
$ spack load py-numpy
or, for dotkit:
-.. code-block:: sh
+.. code-block:: console
$ spack use python
$ spack use py-numpy
@@ -1348,9 +947,9 @@ or, for dotkit:
Now ``import numpy`` will succeed for as long as you keep your current
session open.
-
+^^^^^^^^^^^^^^^^^^^^^
Activating Extensions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^
It is often desirable to have certain packages *always* available as
part of a Python installation. Spack offers a more permanent solution
@@ -1358,17 +957,18 @@ for this case. Instead of requiring users to load particular
environment modules, you can *activate* the package within the Python
installation:
-.. _spack-activate:
+.. _cmd-spack-activate:
+^^^^^^^^^^^^^^^^^^
``spack activate``
-^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^
-.. code-block:: sh
+.. code-block:: console
$ spack activate py-numpy
- ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7=chaos_5_x86_64_ib-3c74eb69 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-nose@1.3.4%gcc@4.4.7=chaos_5_x86_64_ib-5f70f816 for python@2.7.8%gcc@4.4.7.
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-setuptools@11.3.1%gcc@4.4.7 arch=linux-debian7-x86_64-3c74eb69 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-nose@1.3.4%gcc@4.4.7 arch=linux-debian7-x86_64-5f70f816 for python@2.7.8%gcc@4.4.7.
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
Several things have happened here. The user requested that
``py-numpy`` be activated in the ``python`` installation it was built
@@ -1380,10 +980,10 @@ once all dependencies were activated in the ``python`` installation,
If we run ``spack extensions`` again, we now see the three new
packages listed as activated:
-.. code-block:: sh
+.. code-block:: console
$ spack extensions python
- ==> python@2.7.8%gcc@4.4.7=chaos_5_x86_64_ib-703c7a96
+ ==> python@2.7.8%gcc@4.4.7 arch=linux-debian7-x86_64-703c7a96
==> 36 extensions:
geos py-ipython py-pexpect py-pyside py-sip
py-basemap py-libxml2 py-pil py-pytz py-six
@@ -1395,17 +995,16 @@ packages listed as activated:
py-h5py py-numpy py-pyqt py-shiboken
==> 12 installed:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-dateutil@2.4.0 py-nose@1.3.4 py-pyside@1.2.2
py-dateutil@2.4.0 py-numpy@1.9.1 py-pytz@2014.10
py-ipython@2.3.1 py-pygments@2.0.1 py-setuptools@11.3.1
py-matplotlib@1.4.2 py-pyparsing@2.0.3 py-six@1.9.0
==> 3 currently activated:
- -- chaos_5_x86_64_ib / gcc@4.4.7 --------------------------------
+ -- linux-debian7-x86_64 / gcc@4.4.7 --------------------------------
py-nose@1.3.4 py-numpy@1.9.1 py-setuptools@11.3.1
-
Now, when a user runs python, ``numpy`` will be available for import
*without* the user having to explicitly loaded. ``python@2.7.8`` now
acts like a system Python installation with ``numpy`` installed inside
@@ -1423,41 +1022,47 @@ into the same prefix. Users who want a different version of a package
can still get it by using environment modules, but they will have to
explicitly load their preferred version.
-``spack activate -f``
-^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+``spack activate --force``
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
If, for some reason, you want to activate a package *without* its
-dependencies, you can use ``spack activate -f``:
+dependencies, you can use ``spack activate --force``:
-.. code-block:: sh
+.. code-block:: console
- $ spack activate -f py-numpy
- ==> Activated extension py-numpy@1.9.1%gcc@4.4.7=chaos_5_x86_64_ib-66733244 for python@2.7.8%gcc@4.4.7.
+ $ spack activate --force py-numpy
+ ==> Activated extension py-numpy@1.9.1%gcc@4.4.7 arch=linux-debian7-x86_64-66733244 for python@2.7.8%gcc@4.4.7.
-.. _spack-deactivate:
+.. _cmd-spack-deactivate:
+^^^^^^^^^^^^^^^^^^^^
``spack deactivate``
-^^^^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^^^
We've seen how activating an extension can be used to set up a default
version of a Python module. Obviously, you may want to change that at
some point. ``spack deactivate`` is the command for this. There are
several variants:
- * ``spack deactivate <extension>`` will deactivate a single
- extension. If another activated extension depends on this one,
- Spack will warn you and exit with an error.
- * ``spack deactivate -f <extension>`` deactivates an extension
- regardless of packages that depend on it.
- * ``spack deactivate -a <extension>`` deactivates an extension and
- all of its dependencies. Use ``-f`` to disregard dependents.
- * ``spack deactivate -a <extendee>`` deactivates *all* activated
- extensions of a package. For example, to deactivate *all* python
- extensions, use::
+* ``spack deactivate <extension>`` will deactivate a single
+ extension. If another activated extension depends on this one,
+ Spack will warn you and exit with an error.
+* ``spack deactivate --force <extension>`` deactivates an extension
+ regardless of packages that depend on it.
+* ``spack deactivate --all <extension>`` deactivates an extension and
+ all of its dependencies. Use ``--force`` to disregard dependents.
+* ``spack deactivate --all <extendee>`` deactivates *all* activated
+ extensions of a package. For example, to deactivate *all* python
+ extensions, use:
- spack deactivate -a python
+ .. code-block:: console
+ $ spack deactivate --all python
+
+-----------------------
Filesystem requirements
---------------------------
+-----------------------
Spack currently needs to be run from a filesystem that supports
``flock`` locking semantics. Nearly all local filesystems and recent
@@ -1466,7 +1071,7 @@ without ``flock`` support enabled. You can determine how your
filesystems are mounted with ``mount -p``. The output for a Lustre
filesystem might look like this:
-.. code-block:: sh
+.. code-block:: console
$ mount -l | grep lscratch
pilsner-mds1-lnet0@o2ib100:/lsd on /p/lscratchd type lustre (rw,nosuid,noauto,_netdev,lazystatfs,flock)
@@ -1478,16 +1083,16 @@ system administrator to enable ``flock``.
This issue typically manifests with the error below:
-.. code-block:: sh
+.. code-block:: console
$ ./spack find
Traceback (most recent call last):
File "./spack", line 176, in <module>
main()
- File "./spack", line 154, in main
+ File "./spack", line 154, in main
return_val = command(parser, args)
File "./spack/lib/spack/spack/cmd/find.py", line 170, in find
- specs = set(spack.installed_db.query(**q_args))
+ specs = set(spack.installed_db.query(\**q_args))
File "./spack/lib/spack/spack/database.py", line 551, in query
with self.read_transaction():
File "./spack/lib/spack/spack/database.py", line 598, in __enter__
@@ -1502,16 +1107,19 @@ This issue typically manifests with the error below:
A nicer error message is TBD in future versions of Spack.
+
+------------
Getting Help
------------------------
+------------
-.. _spack-help:
+.. _cmd-spack-help:
+^^^^^^^^^^^^^^
``spack help``
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
If you don't find what you need here, the ``help`` subcommand will
-print out out a list of *all* of ``spack``'s options and subcommands:
+print out a list of *all* of spack's options and subcommands:
.. command-output:: spack help
@@ -1520,5 +1128,5 @@ usage information for a particular subcommand:
.. command-output:: spack help install
-Alternately, you can use ``spack -h`` in place of ``spack help``, or
-``spack <subcommand> -h`` to get help on a particular subcommand.
+Alternately, you can use ``spack --help`` in place of ``spack help``, or
+``spack <subcommand> --help`` to get help on a particular subcommand.
diff --git a/lib/spack/docs/build_settings.rst b/lib/spack/docs/build_settings.rst
new file mode 100644
index 0000000000..60ff26f26f
--- /dev/null
+++ b/lib/spack/docs/build_settings.rst
@@ -0,0 +1,168 @@
+.. _build-settings:
+
+======================================
+Build customization
+======================================
+
+Spack allows you to customize how your software is built through the
+``packages.yaml`` file. Using it, you can make Spack prefer particular
+implementations of virtual dependencies (e.g., compilers, MPI, or BLAS),
+or you can make it prefer to build with particular compilers. You can
+also tell Spack to use *external* installations of certain software.
+
+At a high level, the ``packages.yaml`` file is structured like this:
+
+.. code-block:: yaml
+
+ packages:
+ package1:
+ # settings for package1
+ package2:
+ # settings for package2
+ # ...
+ all:
+ # settings that apply to all packages.
+
+So you can either set build preferences *specifically* for one package,
+or you can specify that certain settings should apply to all packages.
+The types of settings you can customize are described in detail below.
+
+Spack's build defaults are in the default
+``etc/spack/defaults/packages.yaml`` file. You can override them in
+``~/.spack/packages.yaml`` or ``etc/spack/packages.yaml``. For more
+details on how this works, see :ref:`configuration-scopes`
+
+.. _sec-external-packages:
+
+-----------------
+External Packages
+-----------------
+
+Spack can be configured to use externally-installed
+packages rather than building its own packages. This may be desirable
+if machines ship with system packages, such as a customized MPI
+that should be used instead of Spack building its own MPI.
+
+External packages are configured through the ``packages.yaml`` file found
+in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/``
+directory. Here's an example of an external configuration:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ paths:
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
+
+This example lists three installations of OpenMPI, one built with gcc,
+one built with gcc and debug information, and another built with Intel.
+If Spack is asked to build a package that uses one of these MPIs as a
+dependency, it will use the pre-installed OpenMPI in
+the given directory. ``packages.yaml`` can also be used to specify modules, as described below.
+
+Each ``packages.yaml`` begins with a ``packages:`` token, followed
+by a list of package names. To specify externals, add a ``paths`` or ``modules``
+token under the package name, which lists externals in a
+``spec: /path`` or ``spec: module-name`` format. Each spec should be as
+well-defined as reasonably possible. If a
+package lacks a spec component, such as missing a compiler or
+package version, then Spack will guess the missing component based
+on its most-favored packages, and it may guess incorrectly.
+
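+For example, a minimal sketch of the ``modules`` form, reusing one of the
+specs above (the module name ``openmpi/1.6.5-intel`` is hypothetical and
+stands in for whatever module your system provides):
+
+.. code-block:: yaml
+
+   packages:
+     openmpi:
+       modules:
+         # hypothetical module name; use the name your module system provides
+         openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: openmpi/1.6.5-intel
+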
+Each package version and compiler listed in an external should
+have entries in Spack's packages and compiler configuration, even
+though the package and compiler may never be built.
+
+The packages configuration can tell Spack to use an external location
+for certain package versions, but it does not restrict Spack to using
+external packages. In the above example, if OpenMPI 1.8.4 becomes
+available, Spack may choose to start building and linking with that version
+rather than continuing to use the pre-installed OpenMPI versions.
+
+To prevent this, the ``packages.yaml`` configuration also allows packages
+to be flagged as non-buildable. The previous example could be modified to
+be:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ paths:
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7: /opt/openmpi-1.4.3
+ openmpi@1.4.3%gcc@4.4.7 arch=linux-x86_64-debian7+debug: /opt/openmpi-1.4.3-debug
+ openmpi@1.6.5%intel@10.1 arch=linux-x86_64-debian7: /opt/openmpi-1.6.5-intel
+ buildable: False
+
+The addition of the ``buildable`` flag tells Spack that it should never build
+its own version of OpenMPI, and it will instead always rely on a pre-built
+OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
+a package name.
+
+If an external module is specified as not buildable, then Spack will load the
+external module into the build environment, where it can be used for linking.
+
+The ``buildable`` flag does not need to be paired with external packages.
+It could also be used alone to forbid packages that may be
+buggy or otherwise undesirable.
+
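+For example, a minimal sketch of using ``buildable`` on its own to forbid a
+package (``mesa`` here is just an arbitrary example name):
+
+.. code-block:: yaml
+
+   packages:
+     mesa:
+       # no external paths or modules are given, so this package can
+       # neither be built by Spack nor satisfied externally
+       buildable: False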
+
+.. _concretization-preferences:
+
+--------------------------
+Concretization Preferences
+--------------------------
+
+Spack can be configured to prefer certain compilers, package
+versions, dependencies, and variants during concretization.
+The preferred configuration can be controlled via the
+``~/.spack/packages.yaml`` file for user configurations, or the
+``etc/spack/packages.yaml`` site configuration.
+
+Here's an example packages.yaml file that sets preferred packages:
+
+.. code-block:: yaml
+
+ packages:
+ opencv:
+ compiler: [gcc@4.9]
+ variants: +debug
+ gperftools:
+ version: [2.2, 2.4, 2.3]
+ all:
+ compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi]
+ providers:
+ mpi: [mvapich, mpich, openmpi]
+
+At a high level, this example is specifying how packages should be
+concretized. The ``opencv`` package should prefer gcc 4.9 and
+be built with the ``debug`` variant. The ``gperftools`` package should
+prefer version 2.2 over 2.4, and 2.4 over 2.3. Every package on the system
+should prefer ``mvapich`` for its MPI and gcc 4.4.7 as its compiler (except
+for ``opencv``, which overrides this by preferring gcc 4.9). These options
+are used to fill in implicit defaults. Any of them can be overridden
+on the command line if explicitly requested.
+
+Each packages.yaml file begins with the string ``packages:`` and
+package names are specified on the next level. The special string ``all``
+applies settings to each package. Underneath each package name is
+one or more components: ``compiler``, ``variants``, ``version``,
+or ``providers``. Each component has an ordered list of spec
+``constraints``, with earlier entries in the list being preferred over
+later entries.
+
+Sometimes a package installation may have constraints that forbid
+the first concretization rule, in which case Spack will use the first
+legal concretization rule. Going back to the example, if a user
+requests gperftools 2.3 or later, then Spack will install version 2.4
+as the 2.4 version of gperftools is preferred over 2.3.
+
+An explicit concretization rule in the preferred section will always
+take preference over unlisted concretizations. In the above example,
+xlc isn't listed in the compiler list. Every listed compiler from
+gcc to pgi will thus be preferred over the xlc compiler.
+
+The syntax for the ``providers`` section differs slightly from other
+concretization rules. A provider lists a value that packages may
+``depend_on`` (e.g., ``mpi``) and an ordered list of rules for fulfilling
+that dependency.
diff --git a/lib/spack/docs/command_index.in b/lib/spack/docs/command_index.in
index 94cdf38109..6520352b42 100644
--- a/lib/spack/docs/command_index.in
+++ b/lib/spack/docs/command_index.in
@@ -1,7 +1,6 @@
-.. _command_index:
-
-Command index
-=================
+=============
+Command Index
+=============
This is an alphabetical list of commands with links to the places they
appear in the documentation.
diff --git a/lib/spack/docs/conf.py b/lib/spack/docs/conf.py
index f3cb268177..db8d3d29dc 100644
--- a/lib/spack/docs/conf.py
+++ b/lib/spack/docs/conf.py
@@ -1,26 +1,27 @@
+# flake8: noqa
##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
+# conditions of the GNU Lesser General Public License for more details.
#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
# -*- coding: utf-8 -*-
#
@@ -37,26 +38,85 @@
import sys
import os
+import re
+import shutil
import subprocess
+from glob import glob
+from sphinx.apidoc import main as sphinx_apidoc
+
+# -- Spack customizations -----------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('exts'))
sys.path.insert(0, os.path.abspath('../external'))
+sys.path.append(os.path.abspath('..'))
# Add the Spack bin directory to the path so that we can use its output in docs.
spack_root = '../../..'
os.environ['SPACK_ROOT'] = spack_root
-os.environ['PATH'] += os.pathsep + '$SPACK_ROOT/bin'
+os.environ['PATH'] += '%s%s/bin' % (os.pathsep, spack_root)
+# Get the spack version for use in the docs
spack_version = subprocess.Popen(
[spack_root + '/bin/spack', '-V'],
stderr=subprocess.PIPE).communicate()[1].strip().split('.')
# Set an environment variable so that colify will print output like it would to
# a terminal.
-os.environ['COLIFY_SIZE'] = '25x80'
+os.environ['COLIFY_SIZE'] = '25x120'
+
+#
+# Generate package list using spack command
+#
+with open('package_list.rst', 'w') as plist_file:
+ subprocess.Popen(
+ [spack_root + '/bin/spack', 'list', '--format=rst'], stdout=plist_file)
+
+#
+# Find all the `cmd-spack-*` references and add them to a command index
+#
+command_names = []
+for filename in glob('*rst'):
+ with open(filename) as f:
+ for line in f:
+ match = re.match('.. _(cmd-spack-.*):', line)
+ if match:
+ command_names.append(match.group(1).strip())
+
+shutil.copy('command_index.in', 'command_index.rst')
+with open('command_index.rst', 'a') as index:
+ index.write('\n')
+ for cmd in sorted(command_names):
+ index.write(' * :ref:`%s`\n' % cmd)
+
+
+# Run sphinx-apidoc
+sphinx_apidoc(['-T', '-o', '.', '../spack'])
+os.remove('modules.rst')
+
+#
+# Exclude everything in spack.__all__ from indexing. All of these
+# symbols are imported from elsewhere in spack; their inclusion in
+# __all__ simply allows package authors to use `from spack import *`.
+# Excluding them ensures they're only documented in their "real" module.
+#
+# This also avoids issues where some of these symbols shadow core spack
+# modules. Sphinx will complain about duplicate docs when this happens.
+#
+import fileinput, spack
+handling_spack = False
+for line in fileinput.input('spack.rst', inplace=1):
+ if handling_spack:
+ if not line.startswith(' :noindex:'):
+ print ' :noindex: %s' % ' '.join(spack.__all__)
+ handling_spack = False
+
+ if line.startswith('.. automodule::'):
+ handling_spack = (line == '.. automodule:: spack\n')
+
+ print line,
# Enable todo items
todo_include_todos = True
diff --git a/lib/spack/docs/config_yaml.rst b/lib/spack/docs/config_yaml.rst
new file mode 100644
index 0000000000..56aa6ed0a1
--- /dev/null
+++ b/lib/spack/docs/config_yaml.rst
@@ -0,0 +1,149 @@
+.. _config-yaml:
+
+====================================
+Basic settings in ``config.yaml``
+====================================
+
+Spack's basic configuration options are set in ``config.yaml``. You can
+see the default settings by looking at
+``etc/spack/defaults/config.yaml``:
+
+.. literalinclude:: ../../../etc/spack/defaults/config.yaml
+ :language: yaml
+
+These settings can be overridden in ``etc/spack/config.yaml`` or
+``~/.spack/config.yaml``. See :ref:`configuration-scopes` for details.
+
+.. _config-file-variables:
+
+------------------------------
+Config file variables
+------------------------------
+
+You may notice some variables prefixed with ``$`` in the settings above.
+Spack understands several variables that can be used in values of
+configuration parameters. They are:
+
+ * ``$spack``: path to the prefix of this spack installation
+ * ``$tempdir``: default system temporary directory (as specified in
+ Python's `tempfile.tempdir
+ <https://docs.python.org/2/library/tempfile.html#tempfile.tempdir>`_
+ variable).
+ * ``$user``: name of the current user
+
+Note that, as with shell variables, you can write these as ``$varname``
+or with braces to distinguish the variable from surrounding characters:
+``${varname}``.
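+
+For example, a sketch of a ``config.yaml`` entry combining these variables
+(the paths themselves are illustrative):
+
+.. code-block:: yaml
+
+   config:
+     # $spack expands to the prefix of this Spack installation
+     source_cache: $spack/var/spack/cache
+     build_stage:
+       # $tempdir and $user expand as described above
+       - $tempdir/$user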
+
+--------------------
+``install_tree``
+--------------------
+
+The location where Spack will install packages and their dependencies.
+Default is ``$spack/opt/spack``.
+
+--------------------
+``module_roots``
+--------------------
+
+Controls where Spack installs generated module files. You can customize
+the location for each type of module, e.g.:
+
+.. code-block:: yaml
+
+ module_roots:
+ tcl: $spack/share/spack/modules
+ lmod: $spack/share/spack/lmod
+ dotkit: $spack/share/spack/dotkit
+
+See :ref:`modules` for details.
+
+--------------------
+``build_stage``
+--------------------
+
+Spack is designed to run out of a user's home directory, and on many
+systems the home directory is a (slow) network filesystem. On most systems,
+building in a temporary filesystem results in faster builds than building
+in the home directory. Usually, there is also more space available in
+the temporary location than in the home directory. So, Spack tries to
+create build stages in temporary space.
+
+By default, Spack's ``build_stage`` is configured like this:
+
+.. code-block:: yaml
+
+ build_stage:
+ - $tempdir
+ - /nfs/tmp2/$user
+ - $spack/var/spack/stage
+
+This is an ordered list of paths that Spack should search when trying to
+find a temporary directory for the build stage. The list is searched in
+order, and Spack will use the first directory to which it has write access.
+See :ref:`config-file-variables` for more on ``$tempdir`` and ``$spack``.
+
+When Spack builds a package, it creates a temporary directory within the
+``build_stage``, and it creates a symbolic link to that directory in
+``$spack/var/spack/stage``. This link is used to track the stage.
+
+After a package is successfully installed, Spack deletes the temporary
+directory it used for the build. Unsuccessful build stages are not deleted, but you
+can manually purge them with :ref:`spack purge --stage
+<cmd-spack-purge>`.
+
+.. note::
+
+ The last item in the list is ``$spack/var/spack/stage``. If this is the
+ only writable directory in the ``build_stage`` list, Spack will build
+ *directly* in ``$spack/var/spack/stage`` and will not link to temporary
+ space.
+
+--------------------
+``source_cache``
+--------------------
+
+Location to cache downloaded tarballs and repositories. By default these
+are stored in ``$spack/var/spack/cache`` and kept indefinitely. They can
+be purged with :ref:`spack purge --downloads
+<cmd-spack-purge>`.
+
+--------------------
+``misc_cache``
+--------------------
+
+Temporary directory to store long-lived cache files, such as indices of
+packages available in repositories. Defaults to ``~/.spack/cache``. Can
+be purged with :ref:`spack purge --misc-cache <cmd-spack-purge>`.
+
+--------------------
+``verify_ssl``
+--------------------
+
+When set to ``true`` (the default), Spack will verify certificates of remote
+hosts when making ``ssl`` connections. Set to ``false`` to disable verification;
+tools like ``curl`` will then use their ``--insecure`` options. Disabling
+this can expose you to attacks. Use at your own risk.
+
+--------------------
+``checksum``
+--------------------
+
+When set to ``true``, Spack verifies downloaded source code using a
+checksum, and will refuse to build packages that it cannot verify. Set
+to ``false`` to disable these checks. Disabling this can expose you to
+attacks. Use at your own risk.
+
+--------------------
+``dirty``
+--------------------
+
+By default, Spack unsets variables in your environment that can change
+the way packages build. This includes ``LD_LIBRARY_PATH``, ``CPATH``,
+``LIBRARY_PATH``, ``DYLD_LIBRARY_PATH``, and others.
+
+By default, builds are ``clean``, but on some machines, compilers and
+other tools may need custom ``LD_LIBRARY_PATH`` settings to run. You can
+set ``dirty`` to ``true`` to skip the cleaning step and make all builds
+"dirty" by default. Be aware that this will reduce the reproducibility
+of builds.
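+
+For example, a minimal sketch of enabling dirty builds in
+``~/.spack/config.yaml``:
+
+.. code-block:: yaml
+
+   config:
+     # keep LD_LIBRARY_PATH and friends from the calling environment
+     dirty: true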
diff --git a/lib/spack/docs/configuration.rst b/lib/spack/docs/configuration.rst
new file mode 100644
index 0000000000..32e1a8c170
--- /dev/null
+++ b/lib/spack/docs/configuration.rst
@@ -0,0 +1,253 @@
+.. _configuration:
+
+==============================
+Configuration Files in Spack
+==============================
+
+Spack has many configuration files. Here is a quick list of them, in
+case you want to skip directly to specific docs:
+
+* :ref:`compilers.yaml <compiler-config>`
+* :ref:`config.yaml <config-yaml>`
+* :ref:`mirrors.yaml <mirrors>`
+* :ref:`modules.yaml <modules>`
+* :ref:`packages.yaml <build-settings>`
+* :ref:`repos.yaml <repositories>`
+
+-------------------------
+YAML Format
+-------------------------
+
+Spack configuration files are written in YAML. We chose YAML because
+it's human readable, but also versatile in that it supports dictionaries,
+lists, and nested sections. For more details on the format, see `yaml.org
+<http://yaml.org>`_ and `libyaml <http://pyyaml.org/wiki/LibYAML>`_.
+Here is an example ``config.yaml`` file:
+
+.. code-block:: yaml
+
+ config:
+ install_tree: $spack/opt/spack
+ module_roots:
+ lmod: $spack/share/spack/lmod
+ build_stage:
+ - $tempdir
+ - /nfs/tmp2/$user
+
+Each Spack configuration file is nested under a top-level section
+corresponding to its name. So, ``config.yaml`` starts with ``config:``,
+and ``mirrors.yaml`` starts with ``mirrors:``, etc.
+
+.. _configuration-scopes:
+
+-------------------------
+Configuration Scopes
+-------------------------
+
+Spack pulls configuration data from files in several directories. There
+are three configuration scopes. From lowest to highest:
+
+1. **defaults**: Stored in ``$(prefix)/etc/spack/defaults/``. These are
+ the "factory" settings. Users should generally not modify the settings
+ here, but should override them in other configuration scopes. The
+ defaults here will change from version to version of Spack.
+
+2. **site**: Stored in ``$(prefix)/etc/spack/``. Settings here affect
+ only *this instance* of Spack, and they override defaults. The site
+ scope can be used for per-project settings (one spack instance per
+ project) or for site-wide settings on a multi-user machine (e.g., for
+ a common spack instance).
+
+3. **user**: Stored in the home directory: ``~/.spack/``. These settings
+ affect all instances of Spack and take the highest precedence.
+
+Each configuration directory may contain several configuration files,
+such as ``config.yaml``, ``compilers.yaml``, or ``mirrors.yaml``. When
+configurations conflict, settings from higher-precedence scopes override
+lower-precedence settings.
+
+Commands that modify scopes (e.g., ``spack compilers``, ``spack repo``,
+etc.) take a ``--scope=<name>`` parameter that you can use to control
+which scope is modified. By default they modify the highest-precedence
+scope.
+
+.. _platform-scopes:
+
+-------------------------
+Platform-specific scopes
+-------------------------
+
+For each scope above, there can *also* be platform-specific settings.
+For example, on Blue Gene/Q machines, Spack needs to know the location of
+cross-compilers for the compute nodes. This configuration is in
+``etc/spack/defaults/bgq/compilers.yaml``. It will take precedence over
+settings in the ``defaults`` scope, but can still be overridden by
+settings in ``site``, ``site/bgq``, ``user``, or ``user/bgq``. So, the
+full scope precedence is:
+
+1. ``defaults``
+2. ``defaults/<platform>``
+3. ``site``
+4. ``site/<platform>``
+5. ``user``
+6. ``user/<platform>``
+
+You can get the name to use for ``<platform>`` by running ``spack arch
+--platform``.
+
+-------------------------
+Scope precedence
+-------------------------
+
+When spack queries for configuration parameters, it searches in
+higher-precedence scopes first. So, settings in a higher-precedence file
+can override those with the same key in a lower-precedence one. For
+list-valued settings, Spack *prepends* higher-precedence settings to
+lower-precedence settings. Completely ignoring higher-level configuration
+options is supported with the ``::`` notation for keys (see
+:ref:`config-overrides` below).
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+Simple keys
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Let's look at an example of overriding a single key in a Spack file. If
+your configurations look like this:
+
+**defaults** scope:
+
+.. code-block:: yaml
+
+ config:
+ install_tree: $spack/opt/spack
+ module_roots:
+ lmod: $spack/share/spack/lmod
+ build_stage:
+ - $tempdir
+ - /nfs/tmp2/$user
+
+**site** scope:
+
+.. code-block:: yaml
+
+ config:
+ install_tree: /some/other/directory
+
+Spack will only override ``install_tree`` in the ``config`` section, and
+will take the site preferences for other settings. You can see the
+final, combined configuration with the ``spack config get <configtype>``
+command:
+
+.. code-block:: console
+ :emphasize-lines: 3
+
+ $ spack config get config
+ config:
+ install_tree: /some/other/directory
+ module_roots:
+ lmod: $spack/share/spack/lmod
+ build_stage:
+ - $tempdir
+ - /nfs/tmp2/$user
+ $ _
+
+.. _config-overrides:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Overriding entire sections
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Above, the site ``config.yaml`` only overrides specific settings in the
+default ``config.yaml``. Sometimes, it is useful to *completely*
+override lower-precedence settings. To do this, you can use *two* colons
+at the end of a key in a configuration file. For example, if the
+**site** ``config.yaml`` above looks like this:
+
+.. code-block:: yaml
+ :emphasize-lines: 1
+
+ config::
+ install_tree: /some/other/directory
+
+Spack will ignore all lower-precedence configuration under the
+``config::`` section:
+
+.. code-block:: console
+
+ $ spack config get config
+ config:
+ install_tree: /some/other/directory
+
+^^^^^^^^^^^^^^^^^^^^^^
+List-valued settings
+^^^^^^^^^^^^^^^^^^^^^^
+
+Let's revisit the ``config.yaml`` example one more time. The
+``build_stage`` setting's value is an ordered list of directories:
+
+**defaults**
+
+.. code-block:: yaml
+
+ build_stage:
+ - $tempdir
+ - /nfs/tmp2/$user
+
+Suppose the user configuration adds its *own* list of ``build_stage``
+paths:
+
+**user**
+
+.. code-block:: yaml
+
+ build_stage:
+ - /lustre-scratch/$user
+ - ~/mystage
+
+Spack will combine the paths from the **defaults** scope with the
+paths in the user's ``~/.spack/config.yaml``. The list in the
+higher-precedence scope is *prepended* to the defaults. ``spack config
+get config`` shows the result:
+
+.. code-block:: console
+ :emphasize-lines: 7-10
+
+ $ spack config get config
+ config:
+ install_tree: /some/other/directory
+ module_roots:
+ lmod: $spack/share/spack/lmod
+ build_stage:
+ - /lustre-scratch/$user
+ - ~/mystage
+ - $tempdir
+ - /nfs/tmp2/$user
+ $ _
+
+As in :ref:`config-overrides`, the higher-precedence scope can
+*completely* override the lower-precedence scope using ``::``. So if the
+user config looked like this:
+
+**user**
+
+.. code-block:: yaml
+ :emphasize-lines: 1
+
+ build_stage::
+ - /lustre-scratch/$user
+ - ~/mystage
+
+The merged configuration would look like this:
+
+.. code-block:: console
+ :emphasize-lines: 7-8
+
+ $ spack config get config
+ config:
+ install_tree: /some/other/directory
+ module_roots:
+ lmod: $spack/share/spack/lmod
+ build_stage:
+ - /lustre-scratch/$user
+ - ~/mystage
+ $ _
diff --git a/lib/spack/docs/contribution_guide.rst b/lib/spack/docs/contribution_guide.rst
new file mode 100644
index 0000000000..4abf97ef92
--- /dev/null
+++ b/lib/spack/docs/contribution_guide.rst
@@ -0,0 +1,522 @@
+.. _contribution-guide:
+
+==================
+Contribution Guide
+==================
+
+This guide is intended for developers or administrators who want to
+contribute a new package, feature, or bugfix to Spack.
+It assumes that you have at least some familiarity with Git VCS and Github.
+The guide will show a few examples of contributing workflows and discuss
+the granularity of pull-requests (PRs). It will also discuss the tests your
+PR must pass in order to be accepted into Spack.
+
+First, what is a PR? Quoting `Bitbucket's tutorials <https://www.atlassian.com/git/tutorials/making-a-pull-request/>`_:
+
+ Pull requests are a mechanism for a developer to notify team members that
+ they have **completed a feature**. The pull request is more than just a
+ notification—it’s a dedicated forum for discussing the proposed feature.
+
+The important part is **completed feature**. The changes proposed in a PR should
+correspond to one feature, bugfix, extension, etc. One can create PRs with
+changes relevant to several different ideas, but reviewing such PRs becomes tedious
+and error-prone. If possible, try to follow the **one-PR-one-package/feature** rule.
+
+Spack uses a rough approximation of the `Git Flow <http://nvie.com/posts/a-successful-git-branching-model/>`_
+branching model. The develop branch contains the latest contributions, and
+master is always tagged and points to the latest stable release. Therefore, when
+you send your request, make ``develop`` the destination branch on the
+`Spack repository <https://github.com/LLNL/spack>`_.
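+
+For example, a minimal sketch of starting a feature branch off of
+``develop`` (the branch name ``my-feature`` is just a placeholder):
+
+.. code-block:: console
+
+   $ git checkout develop
+   $ git pull
+   $ git checkout -b my-feature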
+
+----------------------
+Continuous Integration
+----------------------
+
+Spack uses `Travis CI <https://travis-ci.org/LLNL/spack>`_ for Continuous Integration
+testing. This means that every time you submit a pull request, a series of tests will
+be run to make sure you didn't accidentally introduce any bugs into Spack. Your PR
+will not be accepted until it passes all of these tests. While you can certainly wait
+for the results of these tests after submitting a PR, we recommend that you run them
+locally to speed up the review process.
+
+If you take a look in ``$SPACK_ROOT/.travis.yml``, you'll notice that we test
+against Python 2.6 and 2.7. We currently perform 3 types of tests:
+
+^^^^^^^^^^
+Unit Tests
+^^^^^^^^^^
+
+Unit tests ensure that core Spack features like fetching or spec resolution are
+working as expected. If your PR only adds new packages or modifies existing ones,
+there's very little chance that your changes could cause the unit tests to fail.
+However, if you make changes to Spack's core libraries, you should run the unit
+tests to make sure you didn't break anything.
+
+Since they test things like fetching from VCS repos, the unit tests require
+`git <https://git-scm.com/>`_, `mercurial <https://www.mercurial-scm.org/>`_,
+and `subversion <https://subversion.apache.org/>`_ to run. Make sure these are
+installed on your system and can be found in your ``PATH``. All of these can be
+installed with Spack or with your system package manager.
+
+To run *all* of the unit tests, use:
+
+.. code-block:: console
+
+ $ spack test
+
+These tests may take several minutes to complete. If you know you are only
+modifying a single Spack feature, you can run a single unit test at a time:
+
+.. code-block:: console
+
+ $ spack test architecture
+
+This allows you to develop iteratively: make a change, test that change, make
+another change, test that change, etc. To get a list of all available unit
+tests, run:
+
+.. command-output:: spack test --collect-only
+
+Unit tests are crucial to making sure bugs aren't introduced into Spack. If you
+are modifying core Spack libraries or adding new functionality, please consider
+adding new unit tests or strengthening existing tests.
+
+.. note::
+
+ There is also a ``run-unit-tests`` script in ``share/spack/qa`` that
+ runs the unit tests. Afterwards, it reports back to Coverage with the
+ percentage of Spack that is covered by unit tests. This script is
+ designed for Travis CI. If you want to run the unit tests yourself, we
+ suggest you use ``spack test``.
+
+^^^^^^^^^^^^
+Flake8 Tests
+^^^^^^^^^^^^
+
+Spack uses `Flake8 <http://flake8.pycqa.org/en/latest/>`_ to test for
+`PEP 8 <https://www.python.org/dev/peps/pep-0008/>`_ conformance. PEP 8 is
+a series of style guides for Python that provide suggestions for everything
+from variable naming to indentation. In order to limit the number of PRs that
+were mostly style changes, we decided to enforce PEP 8 conformance. Your PR
+needs to comply with PEP 8 in order to be accepted.
+
+Testing for PEP 8 compliance is easy. Simply run the ``spack flake8``
+command:
+
+.. code-block:: console
+
+ $ spack flake8
+
+``spack flake8`` has several advantages over running ``flake8`` by hand:
+
+#. It only tests files that you have modified since branching off of
+ ``develop``.
+
+#. It works regardless of what directory you are in.
+
+#. It automatically adds approved exemptions from the ``flake8``
+ checks. For example, URLs are often longer than 80 characters, so we
+ exempt them from line length checks. We also exempt lines that start
+ with "homepage", "url", "version", "variant", "depends_on", and
+ "extends" in ``package.py`` files.
+
+More approved flake8 exemptions can be found
+`here <https://github.com/LLNL/spack/blob/develop/.flake8>`_.
+
+If all is well, you'll see something like this:
+
+.. code-block:: console
+
+ $ run-flake8-tests
+ Dependencies found.
+ =======================================================
+ flake8: running flake8 code checks on spack.
+
+ Modified files:
+
+ var/spack/repos/builtin/packages/hdf5/package.py
+ var/spack/repos/builtin/packages/hdf/package.py
+ var/spack/repos/builtin/packages/netcdf/package.py
+ =======================================================
+ Flake8 checks were clean.
+
+However, if you aren't compliant with PEP 8, flake8 will complain:
+
+.. code-block:: console
+
+ var/spack/repos/builtin/packages/netcdf/package.py:26: [F401] 'os' imported but unused
+ var/spack/repos/builtin/packages/netcdf/package.py:61: [E303] too many blank lines (2)
+ var/spack/repos/builtin/packages/netcdf/package.py:106: [E501] line too long (92 > 79 characters)
+ Flake8 found errors.
+
+Most of the error messages are straightforward, but if you don't understand what
+they mean, just ask questions about them when you submit your PR. The line numbers
+will change if you add or delete lines, so simply run ``spack flake8`` again
+to update them.
+
+.. tip::
+
+ Try fixing flake8 errors in reverse order. This eliminates the need for
+ multiple runs of ``flake8`` just to re-compute line numbers and makes it
+ much easier to fix errors directly off of the Travis output.
+
+.. warning::
+
+ Flake8 requires setuptools in order to run. If you installed ``py-flake8``
+ with Spack, make sure to add ``py-setuptools`` to your ``PYTHONPATH``.
+ Otherwise, you will get an error message like:
+
+ .. code-block:: console
+
+ Traceback (most recent call last):
+ File "/usr/bin/flake8", line 5, in <module>
+ from pkg_resources import load_entry_point
+ ImportError: No module named pkg_resources
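+
+ One way to add ``py-setuptools`` to your ``PYTHONPATH`` is to export the
+ path to its ``site-packages`` directory (a sketch; the exact subdirectory
+ depends on your Python version and install layout):
+
+ .. code-block:: console
+
+ $ export PYTHONPATH=$(spack location --install-dir py-setuptools)/lib/python2.7/site-packages:$PYTHONPATH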
+
+^^^^^^^^^^^^^^^^^^^
+Documentation Tests
+^^^^^^^^^^^^^^^^^^^
+
+Spack uses `Sphinx <http://www.sphinx-doc.org/en/stable/>`_ to build its
+documentation. In order to prevent things like broken links and missing imports,
+we added documentation tests that build the documentation and fail if there
+are any warning or error messages.
+
+Building the documentation requires several dependencies, all of which can be
+installed with Spack:
+
+* sphinx
+* graphviz
+* git
+* mercurial
+* subversion
+
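+For example, one way to install them with Spack (note that Sphinx is packaged
+as ``py-sphinx`` in Spack's builtin repository) is:
+
+.. code-block:: console
+
+ $ spack install py-sphinx
+ $ spack install graphviz
+ $ spack install git mercurial subversion
+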
+.. warning::
+
+ Sphinx has `several required dependencies <https://github.com/LLNL/spack/blob/develop/var/spack/repos/builtin/packages/py-sphinx/package.py>`_.
+ If you installed ``py-sphinx`` with Spack, make sure to add all of these
+ dependencies to your ``PYTHONPATH``. The easiest way to do this is to run
+ ``spack activate py-sphinx`` so that all of the dependencies are symlinked
+ to a central location. If you see an error message like:
+
+ .. code-block:: console
+
+ Traceback (most recent call last):
+ File "/usr/bin/flake8", line 5, in <module>
+ from pkg_resources import load_entry_point
+ ImportError: No module named pkg_resources
+
+ that means Sphinx couldn't find setuptools in your ``PYTHONPATH``.
+
+Once all of the dependencies are installed, you can try building the documentation:
+
+.. code-block:: console
+
+ $ cd "$SPACK_ROOT/lib/spack/docs"
+ $ make clean
+ $ make
+
+If you see any warning or error messages, you will have to correct those before
+your PR is accepted.
+
+.. note::
+
+ There is also a ``run-doc-tests`` script in the Quality Assurance directory.
+ The only difference between running this script and running ``make`` by hand
+ is that the script will exit immediately if it encounters an error or warning.
+ This is necessary for Travis CI. If you are making a lot of documentation
+ changes, it is much quicker to run ``make`` by hand so that you can see all
+ of the warnings at once.
+
+If you are editing the documentation, you should obviously be running the
+documentation tests. But even if you are simply adding a new package, your
+changes could cause the documentation tests to fail:
+
+.. code-block:: console
+
+ package_list.rst:8745: WARNING: Block quote ends without a blank line; unexpected unindent.
+
+At first, this error message will mean nothing to you, since you didn't edit
+that file. It starts to make sense once you look at line 8745 of the file in
+question:
+
+.. code-block:: rst
+
+ Description:
+ NetCDF is a set of software libraries and self-describing, machine-
+ independent data formats that support the creation, access, and sharing
+ of array-oriented scientific data.
+
+Our documentation includes :ref:`a list of all Spack packages <package-list>`.
+If you add a new package, its docstring is added to this page. The problem in
+this case was that the docstring looked like:
+
+.. code-block:: python
+
+ class Netcdf(Package):
+ """
+ NetCDF is a set of software libraries and self-describing,
+ machine-independent data formats that support the creation,
+ access, and sharing of array-oriented scientific data.
+ """
+
+Docstrings cannot start with a newline character, or else Sphinx will complain.
+Instead, they should look like:
+
+.. code-block:: python
+
+ class Netcdf(Package):
+ """NetCDF is a set of software libraries and self-describing,
+ machine-independent data formats that support the creation,
+ access, and sharing of array-oriented scientific data."""
+
+Documentation changes can result in much more cryptic warning messages.
+If you don't understand what they mean, feel free to ask when you submit
+your PR.
+
+-------------
+Git Workflows
+-------------
+
+Spack is still in the beta stages of development. Most of our users run off of
+the develop branch, and fixes and new features are constantly being merged. So
+how do you keep up-to-date with upstream while maintaining your own local
+differences and contributing PRs to Spack?
+
+^^^^^^^^^
+Branching
+^^^^^^^^^
+
+The easiest way to contribute a pull request is to make all of your changes on
+new branches. Make sure your ``develop`` is up-to-date and create a new branch
+off of it:
+
+.. code-block:: console
+
+ $ git checkout develop
+ $ git pull upstream develop
+ $ git branch <descriptive_branch_name>
+ $ git checkout <descriptive_branch_name>
+
+Here we assume that your local ``develop`` branch tracks the upstream ``develop``
+branch of Spack. This is not a requirement; you could also do the same with
+remote branches, but for many people it is more convenient to have a local branch
+that tracks upstream.
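+
+If you do not yet have an ``upstream`` remote pointing at the main Spack
+repository (here ``origin`` is assumed to be your fork), you can add one with:
+
+.. code-block:: console
+
+ $ git remote add upstream https://github.com/llnl/spack.git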
+
+Normally we prefer that commits pertaining to a package ``<package-name>`` have
+a message ``<package-name>: descriptive message``. It is important to write a
+descriptive message so that others who might be looking at your changes later
+(in a year or maybe two) will understand the rationale behind them.
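+
+For example, a commit adding a (hypothetical) new version to the ``netcdf``
+package might be recorded as:
+
+.. code-block:: console
+
+ $ git commit --message "netcdf: add version 4.4.1"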
+
+Now, you can make your changes while keeping the ``develop`` branch pure.
+Edit a few files and commit them by running:
+
+.. code-block:: console
+
+ $ git add <files_to_be_part_of_the_commit>
+ $ git commit --message <descriptive_message_of_this_particular_commit>
+
+Next, push it to your remote fork and create a PR:
+
+.. code-block:: console
+
+ $ git push origin <descriptive_branch_name> --set-upstream
+
+GitHub provides a `tutorial <https://help.github.com/articles/about-pull-requests/>`_
+on how to file a pull request. When you send the request, make ``develop`` the
+destination branch.
+
+If you need this change immediately and don't have time to wait for your PR to
+be merged, you can always work on this branch. But if you have multiple PRs,
+another option is to maintain a Frankenstein branch that combines all of your
+other branches:
+
+.. code-block:: console
+
+ $ git checkout develop
+ $ git branch <your_modified_develop_branch>
+ $ git checkout <your_modified_develop_branch>
+ $ git merge <descriptive_branch_name>
+
+This can be done with each new PR you submit. Just make sure to keep this local
+branch up-to-date with upstream ``develop`` too.
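+
+One way to do that is to periodically update your local ``develop`` and merge
+it into the combined branch:
+
+.. code-block:: console
+
+ $ git checkout develop
+ $ git pull upstream develop
+ $ git checkout <your_modified_develop_branch>
+ $ git merge develop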
+
+^^^^^^^^^^^^^^
+Cherry-Picking
+^^^^^^^^^^^^^^
+
+What if you made some changes to your local modified develop branch and already
+committed them, but later decided to contribute them to Spack? You can use
+cherry-picking to create a new branch with only these commits.
+
+First, check out your local modified develop branch:
+
+.. code-block:: console
+
+ $ git checkout <your_modified_develop_branch>
+
+Now, get the hashes of the commits you want from the output of:
+
+.. code-block:: console
+
+ $ git log
+
+Next, create a new branch off of upstream ``develop`` and copy the commits
+that you want in your PR:
+
+.. code-block:: console
+
+ $ git checkout develop
+ $ git pull upstream develop
+ $ git branch <descriptive_branch_name>
+ $ git checkout <descriptive_branch_name>
+ $ git cherry-pick <hash>
+ $ git push origin <descriptive_branch_name> --set-upstream
+
+Now you can create a PR from GitHub's web interface. The net result is as
+follows:
+
+#. You patched your local version of Spack and can use it further.
+#. You "cherry-picked" these changes in a stand-alone branch and submitted it
+ as a PR upstream.
+
+Should you have several commits to contribute, you could follow the same
+procedure by getting hashes of all of them and cherry-picking to the PR branch.
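+
+If the commits are consecutive, you can also cherry-pick the whole range at
+once (``<oldest_hash>`` stands for the earliest commit you want to keep):
+
+.. code-block:: console
+
+ $ git cherry-pick <oldest_hash>^..<newest_hash>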
+
+.. note::
+
+ It is important that, whenever you change something that might be of interest
+ upstream, you create a pull request as soon as possible. Do not wait for
+ weeks or months to do this, because:
+
+ #. you might forget why you modified certain files, and
+ #. it can become difficult to isolate the change into a stand-alone, clean PR.
+
+^^^^^^^^
+Rebasing
+^^^^^^^^
+
+Other developers are constantly making contributions to Spack, possibly on the
+same files that your PR changed. If their PR is merged before yours, it can
+create a merge conflict. This means that your PR can no longer be automatically
+merged without a chance of breaking your changes. In this case, you will be
+asked to rebase on top of the latest upstream ``develop``.
+
+First, make sure your develop branch is up-to-date:
+
+.. code-block:: console
+
+ $ git checkout develop
+ $ git pull upstream develop
+
+Now, we need to switch to the branch you submitted for your PR and rebase it
+on top of develop:
+
+.. code-block:: console
+
+ $ git checkout <descriptive_branch_name>
+ $ git rebase develop
+
+Git will likely ask you to resolve conflicts. Edit the file that it says can't
+be merged automatically and resolve the conflict. Then, run:
+
+.. code-block:: console
+
+ $ git add <file_that_could_not_be_merged>
+ $ git rebase --continue
+
+You may have to repeat this process multiple times until all conflicts are resolved.
+Once this is done, simply force push your rebased branch to your remote fork:
+
+.. code-block:: console
+
+ $ git push --force origin <descriptive_branch_name>
+
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Rebasing with cherry-pick
+^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You can also perform a rebase using ``cherry-pick``. First, create a temporary
+backup branch:
+
+.. code-block:: console
+
+ $ git checkout <descriptive_branch_name>
+ $ git branch tmp
+
+If anything goes wrong, you can always go back to your ``tmp`` branch.
+Now, look at the logs and save the hashes of any commits you would like to keep:
+
+.. code-block:: console
+
+ $ git log
+
+Next, go back to the original branch and reset it to ``develop``.
+Before doing so, make sure that your local ``develop`` branch is up-to-date
+with upstream:
+
+.. code-block:: console
+
+ $ git checkout develop
+ $ git pull upstream develop
+ $ git checkout <descriptive_branch_name>
+ $ git reset --hard develop
+
+Now you can cherry-pick relevant commits:
+
+.. code-block:: console
+
+ $ git cherry-pick <hash1>
+ $ git cherry-pick <hash2>
+
+Push the modified branch to your fork:
+
+.. code-block:: console
+
+ $ git push --force origin <descriptive_branch_name>
+
+If everything looks good, delete the backup branch:
+
+.. code-block:: console
+
+ $ git branch --delete --force tmp
+
+^^^^^^^^^^^^^^^^^^
+Re-writing History
+^^^^^^^^^^^^^^^^^^
+
+Sometimes you may end up on a branch that has diverged so much from develop
+that it cannot easily be rebased. If the current commit history is more of
+an experimental nature and only the net result is important, you may rewrite
+the history.
+
+First, merge upstream ``develop`` and reset your branch to it. On the branch
+in question, run:
+
+.. code-block:: console
+
+ $ git merge develop
+ $ git reset develop
+
+At this point your branch will point to the same commit as ``develop``, so the
+two are indistinguishable as far as history is concerned. However, all of the
+files you previously modified will stay modified in your working tree. In other
+words, you do not lose the changes you made. The changes can be reviewed by
+looking at diffs:
+
+.. code-block:: console
+
+ $ git status
+ $ git diff
+
+The next step is to rewrite the history by adding files and creating commits:
+
+.. code-block:: console
+
+ $ git add <files_to_be_part_of_commit>
+ $ git commit --message <descriptive_message>
+
+After all changed files are committed, you can push the branch to your fork
+and create a PR:
+
+.. code-block:: console
+
+ $ git push origin <descriptive_branch_name> --set-upstream
diff --git a/lib/spack/docs/developer_guide.rst b/lib/spack/docs/developer_guide.rst
index 0b618aa683..5ddbaf2478 100644
--- a/lib/spack/docs/developer_guide.rst
+++ b/lib/spack/docs/developer_guide.rst
@@ -1,7 +1,8 @@
.. _developer_guide:
+===============
Developer Guide
-=====================
+===============
This guide is intended for people who want to work on Spack itself.
If you just want to develop packages, see the :ref:`packaging-guide`.
@@ -11,17 +12,18 @@ It is assumed that you've read the :ref:`basic-usage` and
concepts discussed there. If you're not, we recommend reading those
first.
+--------
Overview
------------------------
+--------
Spack is designed with three separate roles in mind:
- #. **Users**, who need to install software *without* knowing all the
- details about how it is built.
- #. **Packagers** who know how a particular software package is
- built and encode this information in package files.
- #. **Developers** who work on Spack, add new features, and try to
- make the jobs of packagers and users easier.
+#. **Users**, who need to install software *without* knowing all the
+ details about how it is built.
+#. **Packagers** who know how a particular software package is
+ built and encode this information in package files.
+#. **Developers** who work on Spack, add new features, and try to
+ make the jobs of packagers and users easier.
Users could be end users installing software in their home directory,
or administrators installing software to a shared directory on a
@@ -41,9 +43,9 @@ specification.
This gets us to the two key concepts in Spack's software design:
- #. **Specs**: expressions for describing builds of software, and
- #. **Packages**: Python modules that build software according to a
- spec.
+#. **Specs**: expressions for describing builds of software, and
+#. **Packages**: Python modules that build software according to a
+ spec.
A package is a template for building particular software, and a spec
as a descriptor for one or more instances of that template. Users
@@ -63,74 +65,75 @@ building the software off to the package object. The rest of this
document describes all the pieces that come together to make that
happen.
-
+-------------------
Directory Structure
--------------------------
+-------------------
So that you can familiarize yourself with the project, we'll start
-with a high level view of Spack's directory structure::
+with a high level view of Spack's directory structure:
+
+.. code-block:: none
- spack/ <- installation root
- bin/
- spack <- main spack executable
+ spack/ <- installation root
+ bin/
+ spack <- main spack executable
- etc/
- spack/ <- Spack config files.
- Can be overridden by files in ~/.spack.
+ etc/
+ spack/ <- Spack config files.
+ Can be overridden by files in ~/.spack.
- var/
- spack/ <- build & stage directories
- repos/ <- contains package repositories
- builtin/ <- pkg repository that comes with Spack
- repo.yaml <- descriptor for the builtin repository
- packages/ <- directories under here contain packages
+ var/
+ spack/ <- build & stage directories
+ repos/ <- contains package repositories
+ builtin/ <- pkg repository that comes with Spack
+ repo.yaml <- descriptor for the builtin repository
+ packages/ <- directories under here contain packages
+ cache/ <- saves resources downloaded during installs
- opt/
- spack/ <- packages are installed here
+ opt/
+ spack/ <- packages are installed here
- lib/
- spack/
- docs/ <- source for this documentation
- env/ <- compiler wrappers for build environment
+ lib/
+ spack/
+ docs/ <- source for this documentation
+ env/ <- compiler wrappers for build environment
- external/ <- external libs included in Spack distro
- llnl/ <- some general-use libraries
+ external/ <- external libs included in Spack distro
+ llnl/ <- some general-use libraries
- spack/ <- spack module; contains Python code
- cmd/ <- each file in here is a spack subcommand
- compilers/ <- compiler description files
- test/ <- unit test modules
- util/ <- common code
+ spack/ <- spack module; contains Python code
+ cmd/ <- each file in here is a spack subcommand
+ compilers/ <- compiler description files
+ test/ <- unit test modules
+ util/ <- common code
Spack is designed so that it could live within a `standard UNIX
directory hierarchy <http://linux.die.net/man/7/hier>`_, so ``lib``,
``var``, and ``opt`` all contain a ``spack`` subdirectory in case
Spack is installed alongside other software. Most of the interesting
-parts of Spack live in ``lib/spack``. Files under ``var`` are created
-as needed, so there is no ``var`` directory when you initially clone
-Spack from the repository.
+parts of Spack live in ``lib/spack``.
Spack has *one* directory layout and there is no install process.
-version and the source code. Most Python programs don't look like
-this (they use distutils, ``setup.py``, etc.) but we wanted to make
-Spack *very* easy to use. The simple layout spares users from the
-need to install Spack into a Python environment. Many users don't
-have write access to a Python installation, and installing an entire
-new instance of Python to bootstrap Spack would be very complicated.
+Most Python programs don't look like this (they use distutils, ``setup.py``,
+etc.) but we wanted to make Spack *very* easy to use. The simple layout
+spares users from the need to install Spack into a Python environment.
+Many users don't have write access to a Python installation, and installing
+an entire new instance of Python to bootstrap Spack would be very complicated.
Users should not have to install a big, complicated package to
use the thing that's supposed to spare them from the details of big,
complicated packages. The end result is that Spack works out of the
box: clone it and add ``bin`` to your PATH and you're ready to go.
-
+--------------
Code Structure
--------------------------
+--------------
This section gives an overview of the various Python modules in Spack,
grouped by functionality.
+^^^^^^^^^^^^^^^^^^^^^^^
Package-related modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^
:mod:`spack.package`
Contains the :class:`Package <spack.package.Package>` class, which
@@ -157,9 +160,9 @@ Package-related modules
decorator, which allows :ref:`multimethods <multimethods>` in
packages.
-
+^^^^^^^^^^^^^^^^^^^^
Spec-related modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^
:mod:`spack.spec`
Contains :class:`Spec <spack.spec.Spec>` and :class:`SpecParser
@@ -207,9 +210,9 @@ Spec-related modules
Not yet implemented. Should eventually have architecture
descriptions for cross-compiling.
-
+^^^^^^^^^^^^^^^^^
Build environment
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^
:mod:`spack.stage`
Handles creating temporary directories for builds.
@@ -223,15 +226,17 @@ Build environment
Create more implementations of this to change the hierarchy and
naming scheme in ``$spack_prefix/opt``
+^^^^^^^^^^^^^^^^^
Spack Subcommands
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^
:mod:`spack.cmd`
Each module in this package implements a Spack subcommand. See
:ref:`writing commands <writing-commands>` for details.
+^^^^^^^^^^
Unit tests
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^
:mod:`spack.test`
Implements Spack's test suite. Add a module and put its name in
@@ -241,78 +246,100 @@ Unit tests
This is a fake package hierarchy used to mock up packages for
Spack's test suite.
+^^^^^^^^^^^^^
Other Modules
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-:mod:`spack.globals`
- Includes global settings for Spack. the default policy classes for
- things like :ref:`temporary space <temp-space>` and
- :ref:`concretization <concretization-policies>`.
-
-:mod:`spack.tty`
- Basic output functions for all of the messages Spack writes to the
- terminal.
-
-:mod:`spack.color`
- Implements a color formatting syntax used by ``spack.tty``.
+^^^^^^^^^^^^^
:mod:`spack.url`
URL parsing, for deducing names and versions of packages from
tarball URLs.
-:mod:`spack.util`
- In this package are a number of utility modules for the rest of
- Spack.
-
:mod:`spack.error`
:class:`SpackError <spack.error.SpackError>`, the base class for
Spack's exception hierarchy.
+:mod:`llnl.util.tty`
+ Basic output functions for all of the messages Spack writes to the
+ terminal.
-Spec objects
--------------------------
-
-Package objects
--------------------------
+:mod:`llnl.util.tty.color`
+ Implements a color formatting syntax used by ``spack.tty``.
+:mod:`llnl.util`
+ In this package are a number of utility modules for the rest of
+ Spack.
-Most spack commands
-look something like this:
+------------
+Spec objects
+------------
- #. Parse an abstract spec (or specs) from the command line,
- #. *Normalize* the spec based on information in package files,
- #. *Concretize* the spec according to some customizable policies,
- #. Instantiate a package based on the spec, and
- #. Call methods (e.g., ``install()``) on the package object.
+---------------
+Package objects
+---------------
+Most spack commands look something like this:
+#. Parse an abstract spec (or specs) from the command line,
+#. *Normalize* the spec based on information in package files,
+#. *Concretize* the spec according to some customizable policies,
+#. Instantiate a package based on the spec, and
+#. Call methods (e.g., ``install()``) on the package object.
The information in Package files is used at all stages in this
process.
+Conceptually, packages are overloaded. They contain:
-Conceptually, packages are overloaded. They contain
-
+-------------
Stage objects
--------------------------
+-------------
.. _writing-commands:
+----------------
Writing commands
--------------------------
+----------------
+----------
Unit tests
--------------------------
+----------
+------------
Unit testing
--------------------------
-
+------------
+------------------
Developer commands
--------------------------
+------------------
+^^^^^^^^^^^^^
``spack doc``
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^
+^^^^^^^^^^^^^^
``spack test``
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
+
+---------
+Profiling
+---------
+
+Spack has some limited built-in support for profiling, and can report
+statistics using standard Python timing tools. To use this feature,
+supply ``--profile`` to Spack on the command line, before any subcommands.
+
+.. _spack-p:
+
+^^^^^^^^^^^^^^^^^^^
+``spack --profile``
+^^^^^^^^^^^^^^^^^^^
+
+``spack --profile`` output looks like this:
+
+.. command-output:: spack --profile graph dyninst
+ :ellipsis: 25
+
+The bottom of the output shows the top most time consuming functions,
+slowest on top. The profiling support is from Python's built-in tool,
+`cProfile
+<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
diff --git a/lib/spack/docs/exts/sphinxcontrib/__init__.py b/lib/spack/docs/exts/sphinxcontrib/__init__.py
index 298856746c..591cf0e16e 100644
--- a/lib/spack/docs/exts/sphinxcontrib/__init__.py
+++ b/lib/spack/docs/exts/sphinxcontrib/__init__.py
@@ -1,27 +1,3 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
# -*- coding: utf-8 -*-
"""
sphinxcontrib
diff --git a/lib/spack/docs/exts/sphinxcontrib/programoutput.py b/lib/spack/docs/exts/sphinxcontrib/programoutput.py
index f0fa045c86..3f6a4f1595 100644
--- a/lib/spack/docs/exts/sphinxcontrib/programoutput.py
+++ b/lib/spack/docs/exts/sphinxcontrib/programoutput.py
@@ -1,27 +1,3 @@
-##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
# -*- coding: utf-8 -*-
# Copyright (c) 2010, 2011, 2012, Sebastian Wiesner <lunaryorn@gmail.com>
# All rights reserved.
diff --git a/lib/spack/docs/features.rst b/lib/spack/docs/features.rst
index 0998ba8da4..8d7c1ec0cd 100644
--- a/lib/spack/docs/features.rst
+++ b/lib/spack/docs/features.rst
@@ -1,29 +1,32 @@
-Feature overview
-==================
+================
+Feature Overview
+================
This is a high-level overview of features that make Spack different
from other `package managers
<http://en.wikipedia.org/wiki/Package_management_system>`_ and `port
systems <http://en.wikipedia.org/wiki/Ports_collection>`_.
+---------------------------
Simple package installation
-----------------------------
+---------------------------
Installing the default version of a package is simple. This will install
the latest version of the ``mpileaks`` package and all of its dependencies:
-.. code-block:: sh
+.. code-block:: console
$ spack install mpileaks
+--------------------------------
Custom versions & configurations
--------------------------------------------
+--------------------------------
Spack allows installation to be customized. Users can specify the
version, build compiler, compile-time options, and cross-compile
platform, all on the command line.
-.. code-block:: sh
+.. code-block:: console
# Install a particular version by appending @
$ spack install mpileaks@1.1.2
@@ -31,46 +34,55 @@ platform, all on the command line.
# Specify a compiler (and its version), with %
$ spack install mpileaks@1.1.2 %gcc@4.7.3
- # Add special compile-time options with +
+ # Add special compile-time options by name
+ $ spack install mpileaks@1.1.2 %gcc@4.7.3 debug=True
+
+ # Add special boolean compile-time options with +
$ spack install mpileaks@1.1.2 %gcc@4.7.3 +debug
- # Cross-compile for a different architecture with =
- $ spack install mpileaks@1.1.2 =bgqos_0
+ # Add compiler flags using the conventional names
+ $ spack install mpileaks@1.1.2 %gcc@4.7.3 cppflags="-O3 -floop-block"
-Users can specify as many or few options as they care about. Spack
-will fill in the unspecified values with sensible defaults.
+ # Cross-compile for a different architecture with arch=
+ $ spack install mpileaks@1.1.2 arch=bgqos_0
+Users can specify as many or few options as they care about. Spack
+will fill in the unspecified values with sensible defaults. The two listed
+syntaxes for variants are identical when the value is boolean.
+----------------------
Customize dependencies
--------------------------------------
+----------------------
Spack allows *dependencies* of a particular installation to be
customized extensively. Suppose that ``mpileaks`` depends indirectly
on ``libelf`` and ``libdwarf``. Using ``^``, users can add custom
configurations for the dependencies:
-.. code-block:: sh
+.. code-block:: console
# Install mpileaks and link it with specific versions of libelf and libdwarf
$ spack install mpileaks@1.1.2 %gcc@4.7.3 +debug ^libelf@0.8.12 ^libdwarf@20130729+debug
-
+------------------------
Non-destructive installs
--------------------------------------
+------------------------
Spack installs every unique package/dependency configuration into its
own prefix, so new installs will not break existing ones.
+-------------------------------
Packages can peacefully coexist
--------------------------------------
+-------------------------------
Spack avoids library misconfiguration by using ``RPATH`` to link
dependencies. When a user links a library or runs a program, it is
tied to the dependencies it was built with, so there is no need to
manipulate ``LD_LIBRARY_PATH`` at runtime.
+-------------------------
Creating packages is easy
--------------------------------------
+-------------------------
To create a new packages, all Spack needs is a URL for the source
archive. The ``spack create`` command will create a boilerplate
@@ -79,7 +91,7 @@ in pure Python.
For example, this command:
-.. code-block:: sh
+.. code-block:: console
$ spack create http://www.mr511.de/software/libelf-0.8.13.tar.gz
@@ -89,16 +101,26 @@ creates a simple python file:
from spack import *
+
class Libelf(Package):
- homepage = "http://www.example.com/"
+ """FIXME: Put a proper description of your package here."""
+
+ # FIXME: Add a proper url for your package's homepage here.
+ homepage = "http://www.example.com"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
version('0.8.13', '4136d7b4c04df68b686570afa26988ac')
- def install(self, prefix):
- configure("--prefix=%s" % prefix)
+ # FIXME: Add dependencies if required.
+ # depends_on('foo')
+
+ def install(self, spec, prefix):
+ # FIXME: Modify the configure line to suit your build system here.
+ configure('--prefix={0}'.format(prefix))
+
+ # FIXME: Add logic to build and install here.
make()
- make("install")
+ make('install')
It doesn't take much python coding to get from there to a working
package:
diff --git a/lib/spack/docs/getting_started.rst b/lib/spack/docs/getting_started.rst
index 2c5b68ea65..efc1965ce9 100644
--- a/lib/spack/docs/getting_started.rst
+++ b/lib/spack/docs/getting_started.rst
@@ -1,55 +1,1137 @@
+.. _getting_started:
+
+===============
Getting Started
-====================
+===============
+
+-------------
+Prerequisites
+-------------
+
+Spack has the following minimum requirements, which must be installed
+before Spack is run:
+
+1. Python 2.6 or 2.7
+2. A C/C++ compiler
+3. The ``git`` and ``curl`` commands.
+
+These requirements can be easily installed on most modern Linux systems;
+on Macintosh, XCode is required. Spack is designed to run on HPC
+platforms like Cray and BlueGene/Q. Not all packages should be expected
+to work on all platforms. A build matrix showing which packages are
+working on which systems is planned but not yet available.
-Download
---------------------
+------------
+Installation
+------------
-Getting spack is easy. You can clone it from the `github repository
+Getting Spack is easy. You can clone it from the `github repository
<https://github.com/llnl/spack>`_ using this command:
-.. code-block:: sh
+.. code-block:: console
$ git clone https://github.com/llnl/spack.git
-This will create a directory called ``spack``. We'll assume that the
-full path to this directory is in the ``SPACK_ROOT`` environment
-variable. Add ``$SPACK_ROOT/bin`` to your path and you're ready to
-go:
+This will create a directory called ``spack``.
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+Add Spack to the Shell
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+We'll assume that the full path to your downloaded Spack directory is
+in the ``SPACK_ROOT`` environment variable. Add ``$SPACK_ROOT/bin``
+to your path and you're ready to go:
-.. code-block:: sh
+.. code-block:: console
$ export PATH=$SPACK_ROOT/bin:$PATH
$ spack install libelf
-For a richer experience, use Spack's `shell support
-<http://software.llnl.gov/spack/basic_usage.html#environment-modules>`_:
+For a richer experience, use Spack's shell support:
-.. code-block:: sh
+.. code-block:: console
# For bash users
+ $ export SPACK_ROOT=/path/to/spack
$ . $SPACK_ROOT/share/spack/setup-env.sh
# For tcsh or csh users (note you must set SPACK_ROOT)
$ setenv SPACK_ROOT /path/to/spack
$ source $SPACK_ROOT/share/spack/setup-env.csh
-This automatically adds Spack to your ``PATH``.
+This automatically adds Spack to your ``PATH`` and allows the ``spack``
+command to :ref:`load environment modules <shell-support>` and execute
+:ref:`useful packaging commands <packaging-shell-support>`.
-Installation
---------------------
+^^^^^^^^^^^^^^^^^
+Clean Environment
+^^^^^^^^^^^^^^^^^
+
+Many packages' installs can be broken by changing environment
+variables. For example, a package might pick up the wrong build-time
+dependencies (most of which are never explicitly specified) depending on the
+setting of ``PATH``. ``GCC`` seems to be particularly vulnerable to these issues.
-You don't need to install Spack; it's ready to run as soon as you
-clone it from git.
+Therefore, it is recommended that Spack users run with a *clean
+environment*, especially for ``PATH``. Only software that comes with
+the system, or that you know you wish to use with Spack, should be
+included. This procedure will avoid many strange build errors.
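+
+One way to get such a clean environment (a sketch; adjust the ``PATH`` entries
+to your system) is to start a shell that ignores your usual startup files:
+
+.. code-block:: console
+
+ $ env -i HOME=$HOME TERM=$TERM bash --noprofile --norc
+ $ export PATH=/usr/bin:/bin:/usr/sbin:/sbin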
-You may want to run it out of a prefix other than the git repository
+
+^^^^^^^^^^^^^^^^^^
+Check Installation
+^^^^^^^^^^^^^^^^^^
+
+With Spack installed, you should be able to run some basic Spack
+commands. For example:
+
+.. code-block:: console
+
+ $ spack spec netcdf
+ ...
+ netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64
+ ^curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64
+ ^openmpi@1.10.1%gcc@5.3.0~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm+verbs+vt arch=linux-SuSE11-x86_64
+ ^m4@1.4.17%gcc@5.3.0+sigsegv arch=linux-SuSE11-x86_64
+ ^libsigsegv@2.10%gcc@5.3.0 arch=linux-SuSE11-x86_64
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+Optional: Alternate Prefix
+^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You may want to run Spack out of a prefix other than the git repository
you cloned. The ``spack bootstrap`` command provides this
functionality. To install spack in a new directory, simply type:
-.. code-block:: sh
+.. code-block:: console
- $ spack bootstrap /my/favorite/prefix
+ $ spack bootstrap /my/favorite/prefix
This will install a new spack script in ``/my/favorite/prefix/bin``,
which you can use just like you would the regular spack script. Each
copy of spack installs packages into its own ``$PREFIX/opt``
directory.
+
+
+^^^^^^^^^^
+Next Steps
+^^^^^^^^^^
+
+In theory, Spack doesn't need any additional installation; just
+download and run! But in real life, additional steps are usually
+required before Spack can work in a practical sense. Read on...
+
+
+.. _compiler-config:
+
+----------------------
+Compiler configuration
+----------------------
+
+Spack has the ability to build packages with multiple compilers and
+compiler versions. Spack searches for compilers on your machine
+automatically the first time it is run. It does this by inspecting
+your ``PATH``.
+
+.. _cmd-spack-compilers:
+
+^^^^^^^^^^^^^^^^^^^
+``spack compilers``
+^^^^^^^^^^^^^^^^^^^
+
+You can see which compilers spack has found by running ``spack
+compilers`` or ``spack compiler list``:
+
+.. code-block:: console
+
+ $ spack compilers
+ ==> Available compilers
+ -- gcc ---------------------------------------------------------
+ gcc@4.9.0 gcc@4.8.0 gcc@4.7.0 gcc@4.6.2 gcc@4.4.7
+ gcc@4.8.2 gcc@4.7.1 gcc@4.6.3 gcc@4.6.1 gcc@4.1.2
+ -- intel -------------------------------------------------------
+ intel@15.0.0 intel@14.0.0 intel@13.0.0 intel@12.1.0 intel@10.0
+ intel@14.0.3 intel@13.1.1 intel@12.1.5 intel@12.0.4 intel@9.1
+ intel@14.0.2 intel@13.1.0 intel@12.1.3 intel@11.1
+ intel@14.0.1 intel@13.0.1 intel@12.1.2 intel@10.1
+ -- clang -------------------------------------------------------
+ clang@3.4 clang@3.3 clang@3.2 clang@3.1
+ -- pgi ---------------------------------------------------------
+ pgi@14.3-0 pgi@13.2-0 pgi@12.1-0 pgi@10.9-0 pgi@8.0-1
+ pgi@13.10-0 pgi@13.1-1 pgi@11.10-0 pgi@10.2-0 pgi@7.1-3
+ pgi@13.6-0 pgi@12.8-0 pgi@11.1-0 pgi@9.0-4 pgi@7.0-6
+
+Any of these compilers can be used to build Spack packages. More on
+how this is done is in :ref:`sec-specs`.
+
+.. _spack-compiler-add:
+
+^^^^^^^^^^^^^^^^^^^^^^
+``spack compiler add``
+^^^^^^^^^^^^^^^^^^^^^^
+
+An alias for ``spack compiler find``.
+
+.. _spack-compiler-find:
+
+^^^^^^^^^^^^^^^^^^^^^^^
+``spack compiler find``
+^^^^^^^^^^^^^^^^^^^^^^^
+
+If you do not see a compiler in this list, but you want to use it with
+Spack, you can simply run ``spack compiler find`` with the path to
+where the compiler is installed. For example:
+
+.. code-block:: console
+
+ $ spack compiler find /usr/local/tools/ic-13.0.079
+ ==> Added 1 new compiler to ~/.spack/compilers.yaml
+ intel@13.0.079
+
+Or you can run ``spack compiler find`` with no arguments to force
+auto-detection. This is useful if you do not know where compilers are
+installed, but you know that new compilers have been added to your
+``PATH``. For example, you might load a module, like this:
+
+.. code-block:: console
+
+ $ module load gcc-4.9.0
+ $ spack compiler find
+ ==> Added 1 new compiler to ~/.spack/compilers.yaml
+ gcc@4.9.0
+
+This loads the environment module for gcc-4.9.0 to add it to
+``PATH``, and then it adds the compiler to Spack.
+
+.. _spack-compiler-info:
+
+^^^^^^^^^^^^^^^^^^^^^^^
+``spack compiler info``
+^^^^^^^^^^^^^^^^^^^^^^^
+
+If you want to see specifics on a particular compiler, you can run
+``spack compiler info`` on it:
+
+.. code-block:: console
+
+ $ spack compiler info intel@15
+ intel@15.0.0:
+ paths:
+ cc = /usr/local/bin/icc-15.0.090
+ cxx = /usr/local/bin/icpc-15.0.090
+ f77 = /usr/local/bin/ifort-15.0.090
+ fc = /usr/local/bin/ifort-15.0.090
+ modules = []
+ operating system = centos6
+ ...
+
+This shows which C, C++, and Fortran compilers were detected by Spack.
+Notice also that we didn't have to be too specific about the
+version. We just said ``intel@15``, and information about the only
+matching Intel compiler was displayed.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Manual compiler configuration
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If auto-detection fails, you can manually configure a compiler by
+editing your ``~/.spack/compilers.yaml`` file. You can do this by running
+``spack config edit compilers``, which will open the file in your ``$EDITOR``.
+
+Each compiler configuration in the file looks like this:
+
+.. code-block:: yaml
+
+ compilers:
+ - compiler:
+ modules: []
+ operating_system: centos6
+ paths:
+ cc: /usr/local/bin/icc-15.0.024-beta
+ cxx: /usr/local/bin/icpc-15.0.024-beta
+ f77: /usr/local/bin/ifort-15.0.024-beta
+ fc: /usr/local/bin/ifort-15.0.024-beta
+ spec: intel@15.0.0
+
+For compilers that do not support Fortran (like ``clang``), put
+``None`` for ``f77`` and ``fc``:
+
+.. code-block:: yaml
+
+ compilers:
+ - compiler:
+ modules: []
+ operating_system: centos6
+ paths:
+ cc: /usr/bin/clang
+ cxx: /usr/bin/clang++
+ f77: None
+ fc: None
+ spec: clang@3.3svn
+
+Once you save the file, the configured compilers will show up in the
+list displayed by ``spack compilers``.
+
+You can also add compiler flags to manually configured compilers. These
+flags should be specified in the ``flags`` section of the compiler
+specification. The valid flags are ``cflags``, ``cxxflags``, ``fflags``,
+``cppflags``, ``ldflags``, and ``ldlibs``. For example:
+
+.. code-block:: yaml
+
+ compilers:
+ - compiler:
+ modules: []
+ operating_system: centos6
+ paths:
+ cc: /usr/bin/gcc
+ cxx: /usr/bin/g++
+ f77: /usr/bin/gfortran
+ fc: /usr/bin/gfortran
+ flags:
+ cflags: -O3 -fPIC
+ cxxflags: -O3 -fPIC
+ cppflags: -O3 -fPIC
+ spec: gcc@4.7.2
+
+These flags will be treated by spack as if they were entered from
+the command line each time this compiler is used. The compiler wrappers
+then inject those flags into the compiler command. Compiler flags
+entered from the command line will be discussed in more detail in the
+following section.
+
+^^^^^^^^^^^^^^^^^^^^^^^
+Build Your Own Compiler
+^^^^^^^^^^^^^^^^^^^^^^^
+
+If you are particular about which compiler/version you use, you might
+wish to have Spack build it for you. For example:
+
+.. code-block:: console
+
+ $ spack install gcc@4.9.3
+
+Once that has finished, you will need to add it to your
+``compilers.yaml`` file. You can then set Spack to use it by default
+by adding the following to your ``packages.yaml`` file:
+
+.. code-block:: yaml
+
+ packages:
+ all:
+ compiler: [gcc@4.9.3]
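+
+One way to register the newly built compiler (a sketch, assuming this is the
+only GCC 4.9.3 that Spack has installed) is to point ``spack compiler find``
+at its installation prefix:
+
+.. code-block:: console
+
+ $ spack compiler find $(spack location --install-dir gcc@4.9.3)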
+
+
+.. tip::
+
+ If you are building your own compiler, some users prefer to have a
+ Spack instance just for that. For example, create a new Spack in
+ ``~/spack-tools`` and then run ``~/spack-tools/bin/spack install
+ gcc@4.9.3``. Once the compiler is built, don't build anything
+ more in that Spack instance; instead, create a new "real" Spack
+ instance, configure Spack to use the compiler you've just built,
+ and then build your application software in the new Spack
+ instance. Following this tip makes it easy to delete all your
+ Spack packages *except* the compiler.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Compilers Requiring Modules
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Many installed compilers will work regardless of the environment they
+are called with. However, some installed compilers require
+``$LD_LIBRARY_PATH`` or other environment variables to be set in order
+to run; this is typical for Intel and other proprietary compilers.
+
+In such a case, you should tell Spack which module(s) to load in order
+to run the chosen compiler (If the compiler does not come with a
+module file, you might consider making one by hand). Spack will load
+this module into the environment ONLY when the compiler is run, and
+NOT in general for a package's ``install()`` method. See, for
+example, this ``compilers.yaml`` file:
+
+.. code-block:: yaml
+
+ compilers:
+ - compiler:
+ modules: [other/comp/gcc-5.3-sp3]
+ operating_system: SuSE11
+ paths:
+ cc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gcc
+ cxx: /usr/local/other/SLES11.3/gcc/5.3.0/bin/g++
+ f77: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
+ fc: /usr/local/other/SLES11.3/gcc/5.3.0/bin/gfortran
+ spec: gcc@5.3.0
+
+Some compilers require special environment settings to be loaded not just
+to run, but also to execute the code they build, breaking packages that
+need to execute code they just compiled. If it's not possible or
+practical to use a better compiler, you'll need to ensure that
+environment settings are preserved for compilers like this (i.e., you'll
+need to load the module or source the compiler's shell script).
+
+By default, Spack tries to ensure that builds are reproducible by
+cleaning the environment before building. If this interferes with your
+compiler settings, you CAN use ``spack install --dirty`` as a workaround.
+Note that this MAY interfere with package builds.
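+
+For example (``<spec>`` is a placeholder for whatever you are installing):
+
+.. code-block:: console
+
+ $ spack install --dirty <spec>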
+
+.. _licensed-compilers:
+
+^^^^^^^^^^^^^^^^^^
+Licensed Compilers
+^^^^^^^^^^^^^^^^^^
+
+Some proprietary compilers require licensing to use. If you need to
+use a licensed compiler (e.g., PGI), the process is a mix of building
+your own compiler and using modules:
+
+#. Create a Spack package (if it doesn't exist already) to install
+ your compiler. Follow instructions on installing :ref:`license`.
+
+#. Once the compiler is installed, you should be able to test it by
+ using Spack to load the module it just created, and running simple
+ builds (e.g. ``cc helloWorld.c && ./a.out``)
+
+#. Add the newly-installed compiler to ``compilers.yaml`` as shown
+ above.
+
+.. _mixed-toolchains:
+
+^^^^^^^^^^^^^^^^
+Mixed Toolchains
+^^^^^^^^^^^^^^^^
+
+Modern compilers typically come with related compilers for C, C++ and
+Fortran bundled together. When possible, results are best if the same
+compiler is used for all languages.
+
+In some cases, this is not possible. For example, starting with macOS El
+Capitan (10.11), many packages no longer build with GCC, but XCode
+provides no Fortran compilers. The user is therefore forced to use a
+mixed toolchain: XCode-provided Clang for C/C++ and GNU ``gfortran`` for
+Fortran.
+
+#. You need to make sure that command-line tools are installed. To that
+ end run ``$ xcode-select --install``.
+
+#. Run ``$ spack compiler find`` to locate Clang.
+
+#. There are different ways to get ``gfortran`` on macOS. For example, you can
+ install GCC with Spack (``$ spack install gcc``) or with Homebrew
+ (``$ brew install gcc``).
+
+#. The only thing left to do is to edit ``~/.spack/compilers.yaml`` to provide
+ the path to ``gfortran``:
+
+ .. code-block:: yaml
+
+ compilers:
+ darwin-x86_64:
+ clang@7.3.0-apple:
+ cc: /usr/bin/clang
+ cxx: /usr/bin/clang++
+ f77: /path/to/bin/gfortran
+ fc: /path/to/bin/gfortran
+
+ If you used Spack to install GCC, you can get the installation prefix with
+ ``$ spack location -i gcc`` (this will only work if you have a single version
+ of GCC installed). With Homebrew, GCC is installed in
+ ``/usr/local/Cellar/gcc/x.y.z``.
+
+^^^^^^^^^^^^^^^^^^^^^
+Compiler Verification
+^^^^^^^^^^^^^^^^^^^^^
+
+You can verify that your compilers are configured properly by installing a
+simple package. For example:
+
+.. code-block:: console
+
+ $ spack install zlib%gcc@5.3.0
+
+--------------------------------------
+Vendor-Specific Compiler Configuration
+--------------------------------------
+
+With Spack, things usually "just work" with GCC. Not so for other
+compilers. This section provides details on how to get specific
+compilers working.
+
+^^^^^^^^^^^^^^^
+Intel Compilers
+^^^^^^^^^^^^^^^
+
+Intel compilers are unusual because a single Intel compiler version
+can emulate multiple GCC versions. In order to provide this
+functionality, the Intel compiler needs GCC to be installed.
+Therefore, the following steps are necessary to successfully use Intel
+compilers:
+
+#. Install a version of GCC that implements the desired language
+ features (``spack install gcc``).
+
+#. Tell the Intel compiler how to find that desired GCC. This may be
+ done in one of two ways:
+
+ "By default, the compiler determines which version of ``gcc`` or ``g++``
+ you have installed from the ``PATH`` environment variable.
+
+ If you want use a version of ``gcc`` or ``g++`` other than the default
+ version on your system, you need to use either the ``-gcc-name``
+ or ``-gxx-name`` compiler option to specify the path to the version of
+ ``gcc`` or ``g++`` that you want to use."
+
+ -- `Intel Reference Guide <https://software.intel.com/en-us/node/522750>`_
+
+Intel compilers may therefore be configured in one of two ways with
+Spack: using modules, or using compiler flags.
+
+""""""""""""""""""""""""""
+Configuration with Modules
+""""""""""""""""""""""""""
+
+One can control which GCC is seen by the Intel compiler with modules.
+A module must be loaded both for the Intel Compiler (so it will run)
+and GCC (so the compiler can find the intended GCC). The following
+configuration in ``compilers.yaml`` illustrates this technique:
+
+.. code-block:: yaml
+
+ compilers:
+ - compiler:
+ modules: [gcc-4.9.3, intel-15.0.24]
+ operating_system: centos7
+ paths:
+ cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
+ cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
+ f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+ fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+ spec: intel@15.0.24.4.9.3
+
+
+.. note::
+
+ The version number on the Intel compiler is a combination of
+ the "native" Intel version number and the GNU compiler it is
+ targeting.
+
+""""""""""""""""""""""""""
+Command Line Configuration
+""""""""""""""""""""""""""
+
+One can also control which GCC is seen by the Intel compiler by adding
+flags to the ``icc`` command:
+
+#. Identify the location of the compiler you just installed:
+
+ .. code-block:: console
+
+ $ spack location --install-dir gcc
+ ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw...
+
+#. Set up ``compilers.yaml``, for example:
+
+ .. code-block:: yaml
+
+ compilers:
+ - compiler:
+ modules: [intel-15.0.24]
+ operating_system: centos7
+ paths:
+ cc: /opt/intel-15.0.24/bin/icc-15.0.24-beta
+ cflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
+ cxx: /opt/intel-15.0.24/bin/icpc-15.0.24-beta
+ cxxflags: -gxx-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/g++
+ f77: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+ fc: /opt/intel-15.0.24/bin/ifort-15.0.24-beta
+ fflags: -gcc-name ~/spack/opt/spack/linux-centos7-x86_64/gcc-4.9.3-iy4rw.../bin/gcc
+ spec: intel@15.0.24.4.9.3
+
+
+^^^
+PGI
+^^^
+
+PGI comes with two sets of compilers for C++ and Fortran,
+distinguishable by their names. "Old" compilers:
+
+.. code-block:: yaml
+
+ cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+ cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgCC
+ f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgf77
+ fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgf90
+
+"New" compilers:
+
+.. code-block:: yaml
+
+ cc: /soft/pgi/15.10/linux86-64/15.10/bin/pgcc
+ cxx: /soft/pgi/15.10/linux86-64/15.10/bin/pgc++
+ f77: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+ fc: /soft/pgi/15.10/linux86-64/15.10/bin/pgfortran
+
+Older installations of PGI contain just the old compilers, whereas
+newer installations contain both the old and the new. The new compilers are
+considered preferable, as some packages (``hdf``) will not build with the
+old ones.
+
+When auto-detecting a PGI compiler, there are cases where Spack will
+find the old compilers when you really want it to find the new ones. It is
+best to check your ``compilers.yaml`` for this; if the old compilers are
+being used, change ``pgf77`` and ``pgf90`` to ``pgfortran``.
+
+Other issues:
+
+* There are reports that some packages will not build with PGI,
+ including ``libpciaccess`` and ``openssl``. A workaround is to
+ build these packages with another compiler and then use them as
+ dependencies for PGI-built packages. For example:
+
+ .. code-block:: console
+
+ $ spack install openmpi%pgi ^libpciaccess%gcc
+
+
+* PGI requires a license to use; see :ref:`licensed-compilers` for more
+ information on installation.
+
+.. note::
+
+ It is believed the problem with HDF 4 is that everything is
+ compiled with the ``F77`` compiler, but at some point some Fortran
+ 90 code slipped in there. So compilers that can handle both FORTRAN
+ 77 and Fortran 90 (``gfortran``, ``pgfortran``, etc) are fine. But
+ compilers specific to one or the other (``pgf77``, ``pgf90``) won't
+ work.
+
+
+^^^
+NAG
+^^^
+
+The Numerical Algorithms Group provides a licensed Fortran compiler. Like Clang,
+it requires you to set up a :ref:`mixed toolchain <mixed-toolchains>`. It is
+recommended to use GCC for your C/C++ compilers.
+
+The NAG Fortran compilers are a bit more strict than other compilers, and many
+packages will fail to install with error messages like:
+
+.. code-block:: none
+
+ Error: mpi_comm_spawn_multiple_f90.f90: Argument 3 to MPI_COMM_SPAWN_MULTIPLE has data type DOUBLE PRECISION in reference from MPI_COMM_SPAWN_MULTIPLEN and CHARACTER in reference from MPI_COMM_SPAWN_MULTIPLEA
+
+In order to convince the NAG compiler not to be too picky about calling conventions,
+you can use ``FFLAGS=-mismatch`` and ``FCFLAGS=-mismatch``. This can be done through
+the command line:
+
+.. code-block:: console
+
+ $ spack install openmpi fflags="-mismatch"
+
+Or it can be set permanently in your ``compilers.yaml``:
+
+.. code-block:: yaml
+
+ - compiler:
+ modules: []
+ operating_system: centos6
+ paths:
+ cc: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/gcc
+ cxx: /soft/spack/opt/spack/linux-x86_64/gcc-5.3.0/gcc-6.1.0-q2zosj3igepi3pjnqt74bwazmptr5gpj/bin/g++
+ f77: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
+ fc: /soft/spack/opt/spack/linux-x86_64/gcc-4.4.7/nag-6.1-jt3h5hwt5myezgqguhfsan52zcskqene/bin/nagfor
+ flags:
+ fflags: -mismatch
+ spec: nag@6.1
+
+---------------
+System Packages
+---------------
+
+Once compilers are configured, one needs to determine which
+pre-installed system packages, if any, to use in builds. This is
+configured in the file ``~/.spack/packages.yaml``. For example, to use
+an OpenMPI installed in ``/opt/local``, one would use:
+
+.. code-block:: yaml
+
+ packages:
+ openmpi:
+ paths:
+ openmpi@1.10.1: /opt/local
+ buildable: False
+
+In general, Spack is easier to use and more reliable if it builds all of
+its own dependencies. However, there are a few packages for which one
+commonly needs to use system versions:
+
+^^^
+MPI
+^^^
+
+On supercomputers, sysadmins have already built MPI versions that take
+into account the specifics of that computer's hardware. Unless you
+know how they were built and can choose the correct Spack variants,
+you are unlikely to get a working MPI from Spack. Instead, use an
+appropriate pre-installed MPI.
+
+If you choose a pre-installed MPI, you should consider using the
+pre-installed compiler used to build that MPI; see above on
+``compilers.yaml``.
+
+^^^^^^^
+OpenSSL
+^^^^^^^
+
+The ``openssl`` package underlies much of the security of a modern
+OS; an attacker can easily "pwn" any computer on which they can modify SSL.
+Therefore, any ``openssl`` used on a system should be built in a
+"trusted environment", for example that of the OS vendor.
+
+OpenSSL is also updated by the OS vendor from time to time, in
+response to security problems discovered in the wider community. It
+is in everyone's best interest to use any newly updated versions as
+soon as they come out. Modern Linux installations have standard
+procedures for security updates without user involvement.
+
+Spack running at user-level is not a trusted environment, nor do Spack
+users generally keep up-to-date on the latest security holes in SSL. For
+these reasons, a Spack-installed OpenSSL should likely not be trusted.
+
+As long as the system-provided SSL works, you can use it instead. One
+can check whether it works by trying to download from an ``https://`` URL. For
+example:
+
+.. code-block:: console
+
+ $ curl -O https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz
+
+The recommended way to tell Spack to use the system-supplied OpenSSL is
+to add the following to ``packages.yaml``. Note that the ``@system``
+"version" means "I don't care what version it is, just use what is
+there." This is reasonable for OpenSSL, which has a stable API.
+
+
+.. code-block:: yaml
+
+ packages:
+ openssl:
+ paths:
+ openssl@system: /usr
+ version: [system]
+ buildable: False
+
+
+^^^^^^^^^^^^^
+BLAS / LAPACK
+^^^^^^^^^^^^^
+
+The recommended way to use system-supplied BLAS / LAPACK packages is
+to add the following to ``packages.yaml``:
+
+.. code-block:: yaml
+
+ packages:
+ netlib-lapack:
+ paths:
+ netlib-lapack@system: /usr
+ version: [system]
+ buildable: False
+ all:
+ providers:
+ blas: [netlib-lapack]
+ lapack: [netlib-lapack]
+
+.. note::
+
+ The ``@system`` "version" means "I don't care what version it is,
+ just use what is there."  Above we pretend that the system-provided
+ BLAS/LAPACK is ``netlib-lapack`` only because it is the only BLAS/LAPACK
+ provider which uses standard names for its libraries (as opposed to, for
+ example, ``libopenblas.so``).
+
+ Although we specify the external package in ``/usr``, Spack is smart enough
+ not to add ``/usr/lib`` to RPATHs, where it could cause unrelated system
+ libraries to be used instead of their Spack equivalents. ``/usr/bin`` will
+ be present in ``PATH``, but it will have lower precedence than paths from
+ other dependencies. This ensures that binaries from Spack dependencies
+ are preferred over system binaries.
+
+^^^
+Git
+^^^
+
+Some Spack packages use ``git`` to download, which might not work on
+some computers. For example, the following error was
+encountered on a Macintosh during ``spack install julia-master``:
+
+.. code-block:: console
+
+ ==> Trying to clone git repository:
+ https://github.com/JuliaLang/julia.git
+ on branch master
+ Cloning into 'julia'...
+ fatal: unable to access 'https://github.com/JuliaLang/julia.git/':
+ SSL certificate problem: unable to get local issuer certificate
+
+This problem is related to OpenSSL, and in some cases might be solved
+by installing a new version of ``git`` and ``openssl``:
+
+#. Run ``spack install git``
+#. Add the output of ``spack module loads git`` to your ``.bashrc``, as shown below.
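+
+A minimal sketch of these two steps, assuming a ``bash`` shell:
+
+.. code-block:: console
+
+   $ spack install git
+   $ spack module loads git >> ~/.bashrc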
+
+If this doesn't work, it is also possible to disable checking of SSL
+certificates by using:
+
+.. code-block:: console
+
+ $ spack --insecure install
+
+Using ``--insecure`` makes Spack disable SSL checking when fetching
+from websites and from git.
+
+.. warning::
+
+ This workaround should be used ONLY as a last resort! Without SSL
+ certificate verification, Spack and Git will download from sites you
+ wouldn't normally trust. The code you download and run may then be
+ compromised! While this is not a major issue for archives that will
+ be checksummed, it is especially problematic when downloading from
+ named Git branches or tags, which relies entirely on trusting a
+ certificate for security (with no verification).
+
+-----------------------
+Utilities Configuration
+-----------------------
+
+Although Spack does not need installation *per se*, it does rely on
+other packages to be available on its host system. If those packages
+are out of date or missing, then Spack will not work. Sometimes, an
+appeal to the system's package manager can fix such problems. If not,
+the solution is to have Spack install the required packages, and then
+have Spack use them.
+
+For example, if ``curl`` doesn't work, one could use the following steps
+to provide Spack a working ``curl``:
+
+.. code-block:: console
+
+ $ spack install curl
+ $ spack load curl
+
+or alternately:
+
+.. code-block:: console
+
+ $ spack module loads curl >>~/.bashrc
+
+or if environment modules don't work:
+
+.. code-block:: console
+
+ $ export PATH=`spack location --install-dir curl`/bin:$PATH
+
+
+External commands are used by Spack in two places: within core Spack,
+and in the package recipes. The bootstrapping procedure for these two
+cases is somewhat different, and is treated separately below.
+
+^^^^^^^^^^^^^^^^^^^^
+Core Spack Utilities
+^^^^^^^^^^^^^^^^^^^^
+
+Core Spack uses the following packages, mainly to download and unpack
+source code, and to load generated environment modules: ``curl``,
+``env``, ``git``, ``go``, ``hg``, ``svn``, ``tar``, ``unzip``,
+``patch``, ``environment-modules``.
+
+As long as the user's environment is set up to successfully run these
+programs from outside of Spack, they should work inside of Spack as
+well. They can generally be activated as in the ``curl`` example above;
+or some systems might already have an appropriate hand-built
+environment module that may be loaded. Either way works.
+
+A few notes on specific programs in this list:
+
+""""""""""""""""""""""""""
+cURL, git, Mercurial, etc.
+""""""""""""""""""""""""""
+
+Spack depends on cURL to download tarballs, the format in which most
+Spack-installed packages are distributed. Your system's cURL should always
+be able to download unencrypted ``http://`` URLs. However, the cURL on some
+systems has problems with SSL-enabled ``https://`` URLs, due to
+outdated / insecure versions of OpenSSL on those systems. This will
+prevent Spack from installing any software requiring ``https://``
+until a new cURL has been installed, using the technique above.
+
+.. warning::
+
+ Remember that if you install ``curl`` via Spack, it may rely on a
+ user-space OpenSSL that is not upgraded regularly. It may fall out of
+ date faster than your system OpenSSL.
+
+Some packages use source code control systems as their download method:
+``git``, ``hg``, ``svn`` and occasionally ``go``. If you had to install
+a new ``curl``, then chances are the system-supplied version of these
+other programs will also not work, because they also rely on OpenSSL.
+Once ``curl`` has been installed, you can similarly install the others.
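+
+For example, assuming the usual Spack package names for these tools:
+
+.. code-block:: console
+
+   $ spack install git
+   $ spack install mercurial
+   $ spack install subversion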
+
+
+.. _InstallEnvironmentModules:
+
+"""""""""""""""""""
+Environment Modules
+"""""""""""""""""""
+
+In order to use Spack's generated environment modules, you must have
+installed one of *Environment Modules* or *Lmod*. On many Linux
+distributions, this can be installed from the vendor's repository. For
+example: ``yum install environment-modules`` (Fedora/RHEL/CentOS). If
+your Linux distribution does not have Environment Modules, you can get it
+with Spack:
+
+#. Consider using the system Tcl (as long as your system has Tcl version 8.0 or later):
+
+ #) Identify its location using ``which tclsh``
+ #) Identify its version using ``echo 'puts $tcl_version;exit 0' | tclsh``
+ #) Add to ``~/.spack/packages.yaml`` and modify as appropriate:
+
+ .. code-block:: yaml
+
+ packages:
+ tcl:
+ paths:
+ tcl@8.5: /usr
+ version: [8.5]
+ buildable: False
+
+#. Install with:
+
+ .. code-block:: console
+
+ $ spack install environment-modules
+
+#. Activate with the following script (or apply the updates to your
+ ``.bashrc`` file manually):
+
+ .. code-block:: sh
+
+ # Create an empty temporary file (used to feed default answers below)
+ TMP=`tempfile`
+ echo >$TMP
+ # Locate the Spack-installed environment-modules package
+ MODULE_HOME=`spack location --install-dir environment-modules`
+ MODULE_VERSION=`ls -1 $MODULE_HOME/Modules | head -1`
+ # Run add.modules to append the module initialization to your dot files
+ ${MODULE_HOME}/Modules/${MODULE_VERSION}/bin/add.modules <$TMP
+ # Make sure MODULE_VERSION is defined at the top of .bashrc
+ cp .bashrc $TMP
+ echo "MODULE_VERSION=${MODULE_VERSION}" > .bashrc
+ cat $TMP >>.bashrc
+
+This adds to your ``.bashrc`` (or similar) file, enabling Environment
+Modules when you log in. Reload your ``.bashrc`` (or log out and in
+again), and then test that the ``module`` command is found with:
+
+.. code-block:: console
+
+ $ module avail
+
+
+^^^^^^^^^^^^^^^^^
+Package Utilities
+^^^^^^^^^^^^^^^^^
+
+Spack may also encounter bootstrapping problems inside a package's
+``install()`` method. In this case, Spack will normally be running
+inside a *sanitized build environment*. This includes all of the
+package's dependencies, but none of the environment Spack inherited
+from the user: if you load a module or modify ``$PATH`` before
+launching Spack, it will have no effect.
+
+In this case, you will likely need to use the ``--dirty`` flag when
+running ``spack install``, causing Spack to **not** sanitize the build
+environment. You are now responsible for making sure that environment
+does not do strange things to Spack or its installs.
+
+Another way to get Spack to use its own version of something is to add
+that something to a package that needs it. For example:
+
+.. code-block:: python
+
+ depends_on('binutils', type='build')
+
+This is considered best practice for some common build dependencies,
+such as ``autotools`` (if the ``autoreconf`` command is needed) and
+``cmake``; ``cmake`` especially, because different packages may require
+different versions of CMake.
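+
+For instance, a hypothetical package that needs a recent CMake at build time
+only might declare:
+
+.. code-block:: python
+
+   # hypothetical constraint; pick the version your package actually needs
+   depends_on('cmake@3.3:', type='build')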
+
+""""""""
+binutils
+""""""""
+
+.. https://groups.google.com/forum/#!topic/spack/i_7l_kEEveI
+
+Sometimes, strange error messages can happen while building a package.
+For example, ``ld`` might crash. Or one receives a message like:
+
+.. code-block:: console
+
+ ld: final link failed: Nonrepresentable section on output
+
+
+or:
+
+.. code-block:: console
+
+ ld: .../_fftpackmodule.o: unrecognized relocation (0x2a) in section `.text'
+
+These problems are often caused by an outdated ``binutils`` on your
+system. Unlike CMake or Autotools, adding ``depends_on('binutils')`` to
+every package is not considered a best practice because every package
+written in C/C++/Fortran would need it. A potential workaround is to
+load a recent ``binutils`` into your environment and use the ``--dirty``
+flag.
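+
+A minimal sketch of that workaround (the ``py-numpy`` spec is just an example
+of a package whose build might trip over an old ``ld``):
+
+.. code-block:: console
+
+   $ spack install binutils
+   $ spack load binutils
+   $ spack install --dirty py-numpy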
+
+
+.. _cray-support:
+
+-------------
+Spack on Cray
+-------------
+
+Spack differs slightly when used on a Cray system. The architecture spec
+can differentiate between the front-end and back-end processor and operating system.
+For example, on Edison at NERSC, the back-end target processor
+is "Ivy Bridge", so you can specify to use the back-end this way:
+
+.. code-block:: console
+
+ $ spack install zlib target=ivybridge
+
+You can also use the operating system to build against the back-end:
+
+.. code-block:: console
+
+ $ spack install zlib os=CNL10
+
+Notice that the name includes both the operating system name and the major
+version number concatenated together.
+
+Alternatively, if you want to build something for the front-end,
+you can specify the front-end target processor. The processor for a login node
+on Edison is "Sandy Bridge", so we specify it on the command line like so:
+
+.. code-block:: console
+
+ $ spack install zlib target=sandybridge
+
+And the front-end operating system is:
+
+.. code-block:: console
+
+ $ spack install zlib os=SuSE11
+
+^^^^^^^^^^^^^^^^^^^^^^^
+Cray compiler detection
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Spack can detect compilers using two methods. For the front-end, we treat
+everything the same. The difference lies in back-end compiler detection.
+Back-end compiler detection is done via the Tcl ``module avail`` command.
+Once it detects the compiler, Spack writes the appropriate PrgEnv and compiler
+module name to ``compilers.yaml`` and sets the paths to each compiler with Cray's
+compiler wrapper names (i.e. ``cc``, ``CC``, ``ftn``). At build time, Spack will load
+the correct PrgEnv and compiler module and will call the appropriate wrapper.
+
+The ``compilers.yaml`` config file will also differ. There is a
+``modules`` section that is filled with the compiler's Programming Environment
+and module name. On other systems, this field is an empty list (``[]``):
+
+.. code-block:: yaml
+
+ - compiler:
+ modules:
+ - PrgEnv-intel
+ - intel/15.0.109
+
+As mentioned earlier, the compiler paths will look different on a Cray system.
+Since most compilers are invoked using ``cc``, ``CC`` and ``ftn``, the paths for each
+compiler are replaced with their respective Cray compiler wrapper names:
+
+.. code-block:: yaml
+
+ paths:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+
+These wrapper names are used instead of explicit paths to the compiler
+executables. This allows Spack to call the Cray compiler wrappers at build time.
+
+For more on compiler configuration, check out :ref:`compiler-config`.
+
+Spack sets the default Cray link type to dynamic, to better match other
+platforms. Individual packages can enable static linking (which is the
+default outside of Spack on Cray systems) using the ``-static`` flag.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Setting defaults and using Cray modules
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+If you want to use the default compilers for each PrgEnv and also be able
+to load Cray external modules, you will need to set up a ``packages.yaml``.
+
+Here's an example of an external configuration for Cray modules:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ modules:
+ mpich@7.3.1%gcc@5.2.0 arch=cray_xc-haswell-CNL10: cray-mpich
+ mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-haswell-CNL10: cray-mpich
+
+This tells Spack that, for any package that depends on ``mpi``, it should load
+the ``cray-mpich`` module into the environment. You can then use whatever
+environment variables, libraries, etc., are brought into the environment
+via ``module load``.
+
+You can set the default compiler that Spack uses for each compiler type.
+If you want to use the Cray defaults, then set them under ``all:`` in ``packages.yaml``.
+In the ``compiler`` field, list the compiler specs in your order of preference.
+Whenever you build with that compiler type, Spack will concretize to that version.
+
+Here is an example of a full ``packages.yaml`` used at NERSC:
+
+.. code-block:: yaml
+
+ packages:
+ mpi:
+ modules:
+ mpich@7.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-mpich
+ mpich@7.3.1%intel@16.0.0.109 arch=cray_xc-SuSE11-ivybridge: cray-mpich
+ buildable: False
+ netcdf:
+ modules:
+ netcdf@4.3.3.1%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-netcdf
+ netcdf@4.3.3.1%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-netcdf
+ buildable: False
+ hdf5:
+ modules:
+ hdf5@1.8.14%gcc@5.2.0 arch=cray_xc-CNL10-ivybridge: cray-hdf5
+ hdf5@1.8.14%intel@16.0.0.109 arch=cray_xc-CNL10-ivybridge: cray-hdf5
+ buildable: False
+ all:
+ compiler: [gcc@5.2.0, intel@16.0.0.109]
+
+Here we tell Spack that whenever we build with gcc it should use version 5.2.0,
+and whenever we build with the Intel compilers it should use version 16.0.0.109.
+We add a spec for each compiler type for each Cray module. This ensures that
+each external module can be used with every compiler on our system.
+
+For more on external packages check out the section :ref:`sec-external-packages`.
diff --git a/lib/spack/docs/index.rst b/lib/spack/docs/index.rst
index d6ce52b747..4dffe6f091 100644
--- a/lib/spack/docs/index.rst
+++ b/lib/spack/docs/index.rst
@@ -3,8 +3,9 @@
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
+===================
Spack Documentation
-=================================
+===================
Spack is a package management tool designed to support multiple
versions and configurations of software on a wide variety of platforms
@@ -27,7 +28,7 @@ Get spack from the `github repository
<https://github.com/llnl/spack>`_ and install your first
package:
-.. code-block:: sh
+.. code-block:: console
$ git clone https://github.com/llnl/spack.git
$ cd spack/bin
@@ -36,23 +37,40 @@ package:
If you're new to spack and want to start using it, see :doc:`getting_started`,
or refer to the full manual below.
-Table of Contents
----------------------
.. toctree::
:maxdepth: 2
+ :caption: Basics
features
getting_started
basic_usage
- packaging_guide
+ workflows
+ tutorial_sc16
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Reference
+
+ configuration
+ config_yaml
+ build_settings
mirrors
- site_configuration
- developer_guide
+ module_file_support
+ repositories
command_index
package_list
+
+.. toctree::
+ :maxdepth: 2
+ :caption: Contributing
+
+ contribution_guide
+ packaging_guide
+ developer_guide
API Docs <spack>
+==================
Indices and tables
==================
diff --git a/lib/spack/docs/mirrors.rst b/lib/spack/docs/mirrors.rst
index dad04d053b..c69496066f 100644
--- a/lib/spack/docs/mirrors.rst
+++ b/lib/spack/docs/mirrors.rst
@@ -1,7 +1,8 @@
.. _mirrors:
+=======
Mirrors
-============================
+=======
Some sites may not have access to the internet for fetching packages.
These sites will need a local repository of tarballs from which they
@@ -10,27 +11,29 @@ mirror is a URL that points to a directory, either on the local
filesystem or on some server, containing tarballs for all of Spack's
packages.
-Here's an example of a mirror's directory structure::
-
- mirror/
- cmake/
- cmake-2.8.10.2.tar.gz
- dyninst/
- dyninst-8.1.1.tgz
- dyninst-8.1.2.tgz
- libdwarf/
- libdwarf-20130126.tar.gz
- libdwarf-20130207.tar.gz
- libdwarf-20130729.tar.gz
- libelf/
- libelf-0.8.12.tar.gz
- libelf-0.8.13.tar.gz
- libunwind/
- libunwind-1.1.tar.gz
- mpich/
- mpich-3.0.4.tar.gz
- mvapich2/
- mvapich2-1.9.tgz
+Here's an example of a mirror's directory structure:
+
+.. code-block:: none
+
+ mirror/
+ cmake/
+ cmake-2.8.10.2.tar.gz
+ dyninst/
+ dyninst-8.1.1.tgz
+ dyninst-8.1.2.tgz
+ libdwarf/
+ libdwarf-20130126.tar.gz
+ libdwarf-20130207.tar.gz
+ libdwarf-20130729.tar.gz
+ libelf/
+ libelf-0.8.12.tar.gz
+ libelf-0.8.13.tar.gz
+ libunwind/
+ libunwind-1.1.tar.gz
+ mpich/
+ mpich-3.0.4.tar.gz
+ mvapich2/
+ mvapich2-1.9.tgz
The structure is very simple. There is a top-level directory. The
second level directories are named after packages, and the third level
@@ -49,27 +52,16 @@ contains tarballs for each package, named after each package.
not standardize on a particular compression algorithm, because this
would potentially require expanding and re-compressing each archive.
-.. _spack-mirror:
+.. _cmd-spack-mirror:
+----------------
``spack mirror``
-----------------------------
+----------------
Mirrors are managed with the ``spack mirror`` command. The help for
-``spack mirror`` looks like this::
+``spack mirror`` looks like this:
- $ spack mirror -h
- usage: spack mirror [-h] SUBCOMMAND ...
-
- positional arguments:
- SUBCOMMAND
- create Create a directory to be used as a spack mirror, and fill
- it with package archives.
- add Add a mirror to Spack.
- remove Remove a mirror by name.
- list Print out available mirrors to the console.
-
- optional arguments:
- -h, --help show this help message and exit
+.. command-output:: spack help mirror
The ``create`` command actually builds a mirror by fetching all of its
packages from the internet and checksumming them.
@@ -79,8 +71,9 @@ control the URL(s) from which Spack downloads its packages.
.. _spack-mirror-create:
+-----------------------
``spack mirror create``
-----------------------------
+-----------------------
You can create a mirror using the ``spack mirror create`` command, assuming
you're on a machine where you can access the internet.
@@ -89,8 +82,7 @@ The command will iterate through all of Spack's packages and download
the safe ones into a directory structure like the one above. Here is
what it looks like:
-
-.. code-block:: bash
+.. code-block:: console
$ spack mirror create libelf libdwarf
==> Created new mirror in spack-mirror-2014-06-24
@@ -124,25 +116,31 @@ what it looks like:
Once this is done, you can tar up the ``spack-mirror-2014-06-24`` directory and
copy it over to the machine you want it hosted on.
+^^^^^^^^^^^^^^^^^^^
Custom package sets
-~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^
Normally, ``spack mirror create`` downloads all the archives it has
checksums for. If you want to only create a mirror for a subset of
packages, you can do that by supplying a list of package specs on the
command line after ``spack mirror create``. For example, this
-command::
+command:
+
+.. code-block:: console
- $ spack mirror create libelf@0.8.12: boost@1.44:
+ $ spack mirror create libelf@0.8.12: boost@1.44:
Will create a mirror for libelf versions greater than or equal to
0.8.12 and boost versions greater than or equal to 1.44.
+^^^^^^^^^^^^
Mirror files
-~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^
If you have a *very* large number of packages you want to mirror, you
-can supply a file with specs in it, one per line::
+can supply a file with specs in it, one per line:
+
+.. code-block:: console
$ cat specs.txt
libdwarf
@@ -150,7 +148,7 @@ can supply a file with specs in it, one per line::
boost@1.44:
boost@1.39.0
...
- $ spack mirror create -f specs.txt
+ $ spack mirror create --file specs.txt
...
This is useful if there is a specific suite of software managed by
@@ -158,59 +156,90 @@ your site.
.. _spack-mirror-add:
+--------------------
``spack mirror add``
-----------------------------
+--------------------
Once you have a mirror, you need to let spack know about it. This is
relatively simple. First, figure out the URL for the mirror. If it's
-a file, you can use a file URL like this one::
+a file, you can use a file URL like this one:
+
+.. code-block:: none
- file:///Users/gamblin2/spack-mirror-2014-06-24
+ file://~/spack-mirror-2014-06-24
That points to the directory on the local filesystem. If it were on a
web server, you could use a URL like this one:
- https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
+https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
Spack will use the URL as the root for all of the packages it fetches.
You can tell your Spack installation to use that mirror like this:
-.. code-block:: bash
+.. code-block:: console
- $ spack mirror add local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
+ $ spack mirror add local_filesystem file://~/spack-mirror-2014-06-24
Each mirror has a name so that you can refer to it again later.
.. _spack-mirror-list:
+---------------------
``spack mirror list``
-----------------------------
+---------------------
+
+To see all the mirrors Spack knows about, run ``spack mirror list``:
-To see all the mirrors Spack knows about, run ``spack mirror list``::
+.. code-block:: console
$ spack mirror list
- local_filesystem file:///Users/gamblin2/spack-mirror-2014-06-24
+ local_filesystem file://~/spack-mirror-2014-06-24
.. _spack-mirror-remove:
+-----------------------
``spack mirror remove``
-----------------------------
+-----------------------
-To remove a mirror by name::
+To remove a mirror by name, run:
+
+.. code-block:: console
$ spack mirror remove local_filesystem
$ spack mirror list
==> No mirrors configured.
+-----------------
Mirror precedence
-----------------------------
+-----------------
+
+Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``:
-Adding a mirror really adds a line in ``~/.spack/mirrors.yaml``::
+.. code-block:: yaml
mirrors:
- local_filesystem: file:///Users/gamblin2/spack-mirror-2014-06-24
+ local_filesystem: file://~/spack-mirror-2014-06-24
remote_server: https://example.com/some/web-hosted/directory/spack-mirror-2014-06-24
If you want to change the order in which mirrors are searched for
packages, you can edit this file and reorder the sections. Spack will
search the topmost mirror first and the bottom-most mirror last.
+
+.. _caching:
+
+-------------------
+Local Default Cache
+-------------------
+
+Spack caches resources that are downloaded as part of installs. The cache is
+a valid spack mirror: it uses the same directory structure and naming scheme
+as other Spack mirrors (so it can be copied anywhere and referenced with a URL
+like other mirrors). The mirror is maintained locally (within the Spack
+installation directory) at :file:`var/spack/cache/`. It is always enabled (and
+is always searched first when attempting to retrieve files for an installation)
+but can be cleared with :ref:`purge <cmd-spack-purge>`; the cache directory can also
+be deleted manually without issue.
+
+Caching includes retrieved tarball archives and source control repositories, but
+only resources with an associated digest or commit ID (e.g. a revision number
+for SVN) will be cached.
diff --git a/lib/spack/docs/module_file_support.rst b/lib/spack/docs/module_file_support.rst
new file mode 100644
index 0000000000..93c2ee33c6
--- /dev/null
+++ b/lib/spack/docs/module_file_support.rst
@@ -0,0 +1,682 @@
+.. _modules:
+
+=======
+Modules
+=======
+
+The use of module systems to manage user environment in a controlled way
+is a common practice at HPC centers that is often embraced also by individual
+programmers on their development machines. To support this common practice
+Spack provides integration with `Environment Modules
+<http://modules.sourceforge.net/>`_ , `LMod
+<http://lmod.readthedocs.io/en/latest/>`_ and `Dotkit <https://computing.llnl.gov/?set=jobs&page=dotkit>`_ by:
+
+* generating module files after a successful installation
+* providing commands that can leverage the spec syntax to manipulate modules
+
+In the following you will see how to activate shell support for the Spack commands
+that require it, and discover what benefits this brings compared to dealing
+directly with automatically generated module files.
+
+.. note::
+
+ If your machine does not already have a module system installed,
+ we advise you to use either Environment Modules or LMod. See :ref:`InstallEnvironmentModules`
+ for more details.
+
+.. _shell-support:
+
+-------------
+Shell support
+-------------
+
+You can enable shell support by sourcing the appropriate setup file
+in the ``$SPACK_ROOT/share/spack`` directory.
+For ``bash`` or ``ksh`` users:
+
+.. code-block:: console
+
+ $ . ${SPACK_ROOT}/share/spack/setup-env.sh
+
+For ``csh`` and ``tcsh`` instead:
+
+.. code-block:: console
+
+ $ source $SPACK_ROOT/share/spack/setup-env.csh
+
+
+.. note::
+ You can put the source line in your ``.bashrc`` or ``.cshrc`` to
+ have Spack's shell support available on the command line at any login.
+
+
+----------------------------
+Using module files via Spack
+----------------------------
+
+If you have shell support enabled you should be able to run either
+``module avail`` or ``use -l spack`` to see what module/dotkit files have
+been installed. Here is sample output of those programs, showing lots
+of installed packages.
+
+.. code-block:: console
+
+ $ module avail
+
+ ------- ~/spack/share/spack/modules/linux-debian7-x86_64 --------
+ adept-utils@1.0%gcc@4.4.7-5adef8da libelf@0.8.13%gcc@4.4.7
+ automaded@1.0%gcc@4.4.7-d9691bb0 libelf@0.8.13%intel@15.0.0
+ boost@1.55.0%gcc@4.4.7 mpc@1.0.2%gcc@4.4.7-559607f5
+ callpath@1.0.1%gcc@4.4.7-5dce4318 mpfr@3.1.2%gcc@4.4.7
+ dyninst@8.1.2%gcc@4.4.7-b040c20e mpich@3.0.4%gcc@4.4.7
+ gcc@4.9.1%gcc@4.4.7-93ab98c5 mpich@3.0.4%gcc@4.9.0
+ gmp@6.0.0a%gcc@4.4.7 mrnet@4.1.0%gcc@4.4.7-72b7881d
+ graphlib@2.0.0%gcc@4.4.7 netgauge@2.4.6%gcc@4.9.0-27912b7b
+ launchmon@1.0.1%gcc@4.4.7 stat@2.1.0%gcc@4.4.7-51101207
+ libNBC@1.1.1%gcc@4.9.0-27912b7b sundials@2.5.0%gcc@4.9.0-27912b7b
+ libdwarf@20130729%gcc@4.4.7-b52fac98
+
+.. code-block:: console
+
+ $ use -l spack
+
+ spack ----------
+ adept-utils@1.0%gcc@4.4.7-5adef8da - adept-utils @1.0
+ automaded@1.0%gcc@4.4.7-d9691bb0 - automaded @1.0
+ boost@1.55.0%gcc@4.4.7 - boost @1.55.0
+ callpath@1.0.1%gcc@4.4.7-5dce4318 - callpath @1.0.1
+ dyninst@8.1.2%gcc@4.4.7-b040c20e - dyninst @8.1.2
+ gmp@6.0.0a%gcc@4.4.7 - gmp @6.0.0a
+ libNBC@1.1.1%gcc@4.9.0-27912b7b - libNBC @1.1.1
+ libdwarf@20130729%gcc@4.4.7-b52fac98 - libdwarf @20130729
+ libelf@0.8.13%gcc@4.4.7 - libelf @0.8.13
+ libelf@0.8.13%intel@15.0.0 - libelf @0.8.13
+ mpc@1.0.2%gcc@4.4.7-559607f5 - mpc @1.0.2
+ mpfr@3.1.2%gcc@4.4.7 - mpfr @3.1.2
+ mpich@3.0.4%gcc@4.4.7 - mpich @3.0.4
+ mpich@3.0.4%gcc@4.9.0 - mpich @3.0.4
+ netgauge@2.4.6%gcc@4.9.0-27912b7b - netgauge @2.4.6
+ sundials@2.5.0%gcc@4.9.0-27912b7b - sundials @2.5.0
+
+The names here should look familiar; they're the same ones shown by
+``spack find``. You *can* use these names directly. For example,
+you could type either of these commands to load the callpath module:
+
+.. code-block:: console
+
+ $ use callpath@1.0.1%gcc@4.4.7-5dce4318
+
+.. code-block:: console
+
+ $ module load callpath@1.0.1%gcc@4.4.7-5dce4318
+
+.. _cmd-spack-load:
+
+^^^^^^^^^^^^^^^^^^^^^^^
+``spack load / unload``
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Neither of these is particularly pretty, easy to remember, or
+easy to type. Luckily, Spack has its own interface for using modules
+and dotkits. You can use the same spec syntax you're used to:
+
+========================= ==========================
+Environment Modules Dotkit
+========================= ==========================
+``spack load <spec>`` ``spack use <spec>``
+``spack unload <spec>`` ``spack unuse <spec>``
+========================= ==========================
+
+And you can use the same shortened names you use everywhere else in
+Spack. For example, this will add the ``mpich`` package built with
+``gcc`` to your path:
+
+.. code-block:: console
+
+ $ spack install mpich %gcc@4.4.7
+
+ # ... wait for install ...
+
+ $ spack use mpich %gcc@4.4.7
+ Prepending: mpich@3.0.4%gcc@4.4.7 (ok)
+ $ which mpicc
+ ~/spack/opt/linux-debian7-x86_64/gcc@4.4.7/mpich@3.0.4/bin/mpicc
+
+Or, similarly with modules, you could type:
+
+.. code-block:: console
+
+ $ spack load mpich %gcc@4.4.7
+
+These commands will add appropriate directories to your ``PATH``,
+``MANPATH``, ``CPATH``, and ``LD_LIBRARY_PATH``. When you no longer
+want to use a package, you can type unload or unuse similarly:
+
+.. code-block:: console
+
+ $ spack unload mpich %gcc@4.4.7 # modules
+ $ spack unuse mpich %gcc@4.4.7 # dotkit
+
+.. note::
+
+ These ``use``, ``unuse``, ``load``, and ``unload`` subcommands are
+ only available if you have enabled Spack's shell support *and* you
+ have dotkit or modules installed on your machine.
+
+^^^^^^^^^^^^^^^^^^^^^^
+Ambiguous module names
+^^^^^^^^^^^^^^^^^^^^^^
+
+If a spec used with load/unload or use/unuse is ambiguous (i.e. more
+than one installed package matches it), then Spack will warn you:
+
+.. code-block:: console
+
+ $ spack load libelf
+ ==> Error: Multiple matches for spec libelf. Choose one:
+ libelf@0.8.13%gcc@4.4.7 arch=linux-debian7-x86_64
+ libelf@0.8.13%intel@15.0.0 arch=linux-debian7-x86_64
+
+You can either type the ``spack load`` command again with a fully
+qualified argument, or you can add just enough extra constraints to
+identify one package. For example, above, the key differentiator is
+that one ``libelf`` is built with the Intel compiler, while the other
+used ``gcc``. You could therefore just type:
+
+.. code-block:: console
+
+ $ spack load libelf %intel
+
+This loads just the one built with the Intel compiler.
+
+.. _extensions:
+
+.. _cmd-spack-module-loads:
+
+^^^^^^^^^^^^^^^^^^^^^^
+``spack module loads``
+^^^^^^^^^^^^^^^^^^^^^^
+
+In some cases, it is desirable to load not just a module, but also all
+the modules it depends on. This is not required for most modules
+because Spack builds binaries with RPATH support. However, not all
+packages use RPATH to find their dependencies: this can be true in
+particular for Python extensions, which are currently *not* built with
+RPATH.
+
+Scripts to load modules recursively may be made with the command:
+
+.. code-block:: console
+
+ $ spack module loads --dependencies <spec>
+
+An equivalent alternative is:
+
+.. code-block :: console
+
+ $ source <( spack module loads --dependencies <spec> )
+
+.. warning::
+
+ The ``spack load`` command does not currently accept the
+ ``--dependencies`` flag. Use ``spack module loads`` instead, for
+ now.
+
+.. See #1662
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Module Commands for Shell Scripts
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Although Spack is flexible, the ``module`` command is much faster.
+This could become an issue when emitting a series of ``spack load``
+commands inside a shell script. Instead, ``spack module loads`` may
+be used to generate ``module load`` commands that can be
+cut-and-pasted into a shell script. For example:
+
+.. code-block:: console
+
+ $ spack module loads --dependencies py-numpy git
+ # bzip2@1.0.6%gcc@4.9.3=linux-x86_64
+ module load bzip2-1.0.6-gcc-4.9.3-ktnrhkrmbbtlvnagfatrarzjojmkvzsx
+ # ncurses@6.0%gcc@4.9.3=linux-x86_64
+ module load ncurses-6.0-gcc-4.9.3-kaazyneh3bjkfnalunchyqtygoe2mncv
+ # zlib@1.2.8%gcc@4.9.3=linux-x86_64
+ module load zlib-1.2.8-gcc-4.9.3-v3ufwaahjnviyvgjcelo36nywx2ufj7z
+ # sqlite@3.8.5%gcc@4.9.3=linux-x86_64
+ module load sqlite-3.8.5-gcc-4.9.3-a3eediswgd5f3rmto7g3szoew5nhehbr
+ # readline@6.3%gcc@4.9.3=linux-x86_64
+ module load readline-6.3-gcc-4.9.3-se6r3lsycrwxyhreg4lqirp6xixxejh3
+ # python@3.5.1%gcc@4.9.3=linux-x86_64
+ module load python-3.5.1-gcc-4.9.3-5q5rsrtjld4u6jiicuvtnx52m7tfhegi
+ # py-setuptools@20.5%gcc@4.9.3=linux-x86_64
+ module load py-setuptools-20.5-gcc-4.9.3-4qr2suj6p6glepnedmwhl4f62x64wxw2
+ # py-nose@1.3.7%gcc@4.9.3=linux-x86_64
+ module load py-nose-1.3.7-gcc-4.9.3-pwhtjw2dvdvfzjwuuztkzr7b4l6zepli
+ # openblas@0.2.17%gcc@4.9.3+shared=linux-x86_64
+ module load openblas-0.2.17-gcc-4.9.3-pw6rmlom7apfsnjtzfttyayzc7nx5e7y
+ # py-numpy@1.11.0%gcc@4.9.3+blas+lapack=linux-x86_64
+ module load py-numpy-1.11.0-gcc-4.9.3-mulodttw5pcyjufva4htsktwty4qd52r
+ # curl@7.47.1%gcc@4.9.3=linux-x86_64
+ module load curl-7.47.1-gcc-4.9.3-ohz3fwsepm3b462p5lnaquv7op7naqbi
+ # autoconf@2.69%gcc@4.9.3=linux-x86_64
+ module load autoconf-2.69-gcc-4.9.3-bkibjqhgqm5e3o423ogfv2y3o6h2uoq4
+ # cmake@3.5.0%gcc@4.9.3~doc+ncurses+openssl~qt=linux-x86_64
+ module load cmake-3.5.0-gcc-4.9.3-x7xnsklmgwla3ubfgzppamtbqk5rwn7t
+ # expat@2.1.0%gcc@4.9.3=linux-x86_64
+ module load expat-2.1.0-gcc-4.9.3-6pkz2ucnk2e62imwakejjvbv6egncppd
+ # git@2.8.0-rc2%gcc@4.9.3+curl+expat=linux-x86_64
+ module load git-2.8.0-rc2-gcc-4.9.3-3bib4hqtnv5xjjoq5ugt3inblt4xrgkd
+
+The script may be further edited by removing unnecessary modules.
+
+
+^^^^^^^^^^^^^^^
+Module Prefixes
+^^^^^^^^^^^^^^^
+
+On some systems, modules are automatically prefixed with a certain
+string; ``spack module loads`` needs to know about that prefix when it
+issues ``module load`` commands. Add the ``--prefix`` option to your
+``spack module loads`` commands if this is necessary.
+
+For example, consider the following on one system:
+
+.. code-block:: console
+
+ $ module avail
+ linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
+
+ $ spack module loads antlr # WRONG!
+ # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
+ module load antlr-2.7.7-gcc-5.3.0-bdpl46y
+
+ $ spack module loads --prefix linux-SuSE11-x86_64/ antlr
+ # antlr@2.7.7%gcc@5.3.0~csharp+cxx~java~python arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/antlr-2.7.7-gcc-5.3.0-bdpl46y
+
+----------------------------
+Auto-generating Module Files
+----------------------------
+
+Module files are generated by post-install hooks after the successful
+installation of a package. The following table summarizes the essential
+information associated with the different file formats
+that can be generated by Spack:
+
+ +-----------------------------+--------------------+-------------------------------+----------------------+
+ | | **Hook name** | **Default root directory** | **Compatible tools** |
+ +=============================+====================+===============================+======================+
+ | **Dotkit** | ``dotkit`` | share/spack/dotkit | DotKit |
+ +-----------------------------+--------------------+-------------------------------+----------------------+
+ | **TCL - Non-Hierarchical** | ``tcl`` | share/spack/modules | Env. Modules/LMod |
+ +-----------------------------+--------------------+-------------------------------+----------------------+
+ | **Lua - Hierarchical** | ``lmod`` | share/spack/lmod | LMod |
+ +-----------------------------+--------------------+-------------------------------+----------------------+
+
+
+Though Spack ships with sensible defaults for the generation of module files,
+one can customize many aspects of their generation to accommodate package- or site-specific needs.
+These customizations are enabled by either:
+
+ 1. overriding certain callback APIs in the Python packages
+ 2. writing specific rules in the ``modules.yaml`` configuration file
+
+The former method best fits cases that are site-independent, e.g. injecting variables
+from language interpreters into their extensions. The latter instead permits
+fine-tuning the content, naming and creation of module files to meet site-specific conventions.
+
+^^^^^^^^^^^^^^^^^^^^
+``Package`` file API
+^^^^^^^^^^^^^^^^^^^^
+
+There are two methods that can be overridden in any ``package.py`` to affect the
+content of generated module files. The first one is:
+
+.. code-block:: python
+
+ def setup_environment(self, spack_env, run_env):
+ """Set up the compile and runtime environments for a package."""
+ pass
+
+and can alter the content of the module file generated for *the same package
+in which it is overridden*, by adding actions to ``run_env``. The second method is:
+
+.. code-block:: python
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ """Set up the environment of packages that depend on this one"""
+ pass
+
+and has a similar effect on the module files of dependent packages. In this case
+too, ``run_env`` must be filled with the desired list of environment modifications.
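+
+As a minimal sketch, a hypothetical package that installs extra tools into a
+non-standard ``utils`` subdirectory could expose them at run time like this:
+
+.. code-block:: python
+
+   def setup_environment(self, spack_env, run_env):
+       # hypothetical layout: add <prefix>/utils to PATH in the generated module
+       run_env.prepend_path('PATH', join_path(self.prefix, 'utils'))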
+
+.. note::
+ The ``r`` package and callback APIs
+ A typical example in which overriding both methods proves useful
+ is given by the ``r`` package. This package installs libraries and headers
+ in non-standard locations and it is possible to prepend the appropriate directory
+ to the corresponding environment variables:
+
+ ================== =================================
+ LIBRARY_PATH ``self.prefix/rlib/R/lib``
+ LD_LIBRARY_PATH ``self.prefix/rlib/R/lib``
+ CPATH ``self.prefix/rlib/R/include``
+ ================== =================================
+
+ with the following snippet:
+
+ .. literalinclude:: ../../../var/spack/repos/builtin/packages/r/package.py
+ :pyobject: R.setup_environment
+
+ The ``r`` package also knows which environment variable should be modified
+ to make language extensions provided by other packages available, and modifies
+ it appropriately in the override of the second method:
+
+ .. literalinclude:: ../../../var/spack/repos/builtin/packages/r/package.py
+ :lines: 128-129,146-151
+
+.. _modules-yaml:
+
+---------------------------------
+Configuration in ``modules.yaml``
+---------------------------------
+
+The name of the configuration file that controls module generation behavior
+is ``modules.yaml``. The default configuration:
+
+.. literalinclude:: ../../../etc/spack/defaults/modules.yaml
+ :language: yaml
+
+activates generation for ``tcl`` and ``dotkit`` module files and inspects
+the installation folder of each package for the presence of a set of subdirectories
+(``bin``, ``man``, ``share/man``, etc.). If any is found, its full path is prepended
+to the environment variables listed below the folder name.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Activation of other systems
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Any other module file generator shipped with Spack can be activated by adding it to the
+list under the ``enable`` key in ``modules.yaml``. Currently the only generator that
+is not activated by default is ``lmod``, which produces hierarchical Lua module files.
+For each module system that can be enabled a finer configuration is possible.
+
+Directives that are aimed at driving the generation of a particular type of module files
+should be listed under a top level key that corresponds to the generator being
+customized:
+
+.. code-block:: yaml
+
+ modules:
+ enable:
+ - tcl
+ - dotkit
+ - lmod
+ tcl:
+ # contains environment modules specific customizations
+ dotkit:
+ # contains dotkit specific customizations
+ lmod:
+ # contains lmod specific customizations
+
+All these module sections allow for both:
+
+1. global directives that usually affect the whole layout of modules or the naming scheme
+2. directives that affect only a set of packages and modify their content
+
+For the latter point in particular, it is possible to use anonymous specs
+to select an appropriate set of packages to which the modifications should be applied.
+
+.. _anonymous_specs:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Selection by anonymous specs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The procedure to select packages using anonymous specs is a natural
+extension of using them to install packages, the only difference being
+that specs in this case **are not required to have a root package**.
+Consider for instance this snippet:
+
+.. code-block:: yaml
+
+ modules:
+ tcl:
+ # The keyword `all` selects every package
+ all:
+ environment:
+ set:
+ BAR: 'bar'
+ # This anonymous spec selects any package that
+ # depends on openmpi. The double colon at the
+ # end clears the set of rules that matched so far.
+ ^openmpi::
+ environment:
+ set:
+ BAR: 'baz'
+ # Selects any zlib package
+ zlib:
+ environment:
+ prepend_path:
+ LD_LIBRARY_PATH: 'foo'
+ # Selects zlib compiled with gcc@4.8
+ zlib%gcc@4.8:
+ environment:
+ unset:
+ - FOOBAR
+
+During module file generation, the configuration above will instruct
+Spack to set the environment variable ``BAR=bar`` for every module,
+unless the associated spec satisfies ``^openmpi`` in which case ``BAR=baz``.
+In addition in any spec that satisfies ``zlib`` the value ``foo`` will be
+prepended to ``LD_LIBRARY_PATH`` and in any spec that satisfies ``zlib%gcc@4.8``
+the variable ``FOOBAR`` will be unset.
+
+.. note::
+ Order does matter
+ The modifications associated with the ``all`` keyword are always evaluated
+ first, no matter where they appear in the configuration file. All the other
+ spec constraints are instead evaluated top to bottom.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Blacklist or whitelist the generation of specific module files
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Anonymous specs are also used to prevent module files from being written or
+to force them to be written. A common case for that at HPC centers is to hide
+from users all of the software that needs to be built with system compilers.
+Suppose, for instance, that ``gcc@4.4.7`` is provided by your system. Then,
+with a configuration file like this one:
+
+.. code-block:: yaml
+
+ modules:
+ tcl:
+ whitelist: ['gcc', 'llvm'] # Whitelist will have precedence over blacklist
+ blacklist: ['%gcc@4.4.7'] # Assuming gcc@4.4.7 is the system compiler
+
+you will skip the generation of module files for any package that
+is compiled with ``gcc@4.4.7``, with the exception of any ``gcc``
+or any ``llvm`` installation.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Customize the naming scheme
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+The names of environment modules generated by Spack are not always easy to
+fully comprehend due to the long hash in the name. There are two module
+configuration options to help with that. The first is a global setting to
+adjust the hash length. It can be set anywhere from 0 to 32 and has a default
+length of 7. This only affects the representation of the hash in the module file
+name and does not affect the size of the package hash. Be aware that the shorter
+the hash length, the more likely naming conflicts will occur. The following
+snippet shows how to set the hash length in the module file names:
+
+.. code-block:: yaml
+
+ modules:
+ tcl:
+ hash_length: 7
+
+To help make module names more readable, and to help alleviate name conflicts
+with a short hash, one can use the ``suffixes`` option in the modules
+configuration file. This option will add strings to modules that match a spec.
+For instance, the following config options,
+
+.. code-block:: yaml
+
+ modules:
+ tcl:
+ all:
+ suffixes:
+ ^python@2.7.12: 'python-2.7.12'
+ ^openblas: 'openblas'
+
+will add a ``python-2.7.12`` version string to any package compiled with
+python matching the spec ``python@2.7.12``. This is useful for knowing which
+version of python a set of python extensions is associated with. Likewise, the
+``openblas`` string is attached to any program that has openblas in the spec,
+most likely via the ``+blas`` variant specification.
+
+.. note::
+ TCL module files
+ A modification that is specific to ``tcl`` module files is the possibility
+ to change the naming scheme of modules.
+
+ .. code-block:: yaml
+
+ modules:
+ tcl:
+ naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
+ all:
+ conflict: ['${PACKAGE}', 'intel/14.0.1']
+
+ will create module files that will conflict with ``intel/14.0.1`` and with the
+ base directory of the same module, effectively making it impossible to
+ load two or more versions of the same software at the same time. The tokens
+ that are available for use in this directive are the same understood by
+ the ``Spec.format`` method.
+
+
+.. note::
+ LMod hierarchical module files
+ When ``lmod`` is activated Spack will generate a set of hierarchical lua module
+ files that are understood by LMod. The generated hierarchy always contains the
+ three layers ``Core`` / ``Compiler`` / ``MPI`` but can be further extended to
+ any other virtual dependency present in Spack. A case that could be useful in
+ practice is for instance:
+
+ .. code-block:: yaml
+
+ modules:
+ enable:
+ - lmod
+ lmod:
+ core_compilers: ['gcc@4.8']
+ hierarchical_scheme: ['lapack']
+
+ that will generate a hierarchy in which the ``lapack`` layer is treated as the ``mpi``
+ one. This allows a site to build the same libraries or applications against different
+ implementations of ``mpi`` and ``lapack``, and let LMod switch safely from one to the
+ other.
+
+.. warning::
+ Deep hierarchies and ``lmod spider``
+ For hierarchies that are deeper than three layers ``lmod spider`` may have some issues.
+ See `this discussion on the LMod project <https://github.com/TACC/Lmod/issues/114>`_.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Filter out environment modifications
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Modifications to certain environment variables in module files are generated by
+default, for instance by prefix inspections in the default configuration file.
+There are cases though where some of these modifications are unwanted.
+Suppose you need to avoid having ``CPATH`` and ``LIBRARY_PATH``
+modified by your ``dotkit`` modules:
+
+.. code-block:: yaml
+
+ modules:
+ dotkit:
+ all:
+ filter:
+ # Exclude changes to any of these variables
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+
+The configuration above will generate dotkit module files that will not contain
+modifications to either ``CPATH`` or ``LIBRARY_PATH`` and environment module
+files that instead will contain these modifications.
+
+^^^^^^^^^^^^^^^^^^^^^
+Autoload dependencies
+^^^^^^^^^^^^^^^^^^^^^
+
+In some cases it can be useful to have module files directly autoload
+their dependencies. This may be the case for Python extensions, if not
+activated using ``spack activate``:
+
+.. code-block:: yaml
+
+ modules:
+ tcl:
+ ^python:
+ autoload: 'direct'
+
+The configuration file above will produce module files that will
+automatically load their direct dependencies. The allowed values for the
+``autoload`` statement are ``none``, ``direct`` or ``all``.
+
+.. note::
+ TCL prerequisites
+ In the ``tcl`` section of the configuration file it is possible to use
+ the ``prerequisites`` directive that accepts the same values as
+ ``autoload``. It will produce module files that have a ``prereq``
+ statement instead of automatically loading other modules.
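+
+ A minimal sketch, reusing the ``^python`` selection from the example above:
+
+ .. code-block:: yaml
+
+    modules:
+      tcl:
+        ^python:
+          prerequisites: 'direct'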
+
+------------------------
+Maintaining Module Files
+------------------------
+
+Spack not only provides great flexibility in the generation of module files
+and in the customization of both their layout and content, but also ships with
+a tool to ease the burden of their maintenance in production environments.
+This tool is the ``spack module`` command:
+
+.. command-output:: spack module --help
+
+.. _cmd-spack-module-refresh:
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+``spack module refresh``
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+The command that regenerates module files to update their content or
+their layout is ``module refresh``:
+
+.. command-output:: spack module refresh --help
+
+A set of packages can be selected using anonymous specs for the optional
+``constraint`` positional argument. The argument ``--module-type`` identifies
+the type of module files to refresh. Optionally the entire tree can be deleted
+before regeneration if the change in layout is radical.
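+
+For example, one might regenerate only the ``tcl`` module files of everything
+built with a given compiler (the constraint here is just an illustration):
+
+.. code-block:: console
+
+   $ spack module refresh --module-type tcl %gcc@4.4.7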
+
+.. _cmd-spack-module-rm:
+
+^^^^^^^^^^^^^^^^^^^
+``spack module rm``
+^^^^^^^^^^^^^^^^^^^
+
+If instead what you need is just to delete a few module files, then the right
+command is ``module rm``:
+
+.. command-output:: spack module rm --help
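+
+For instance, a sketch that removes the module files of a single installed
+package (the ``libelf`` spec is just an example):
+
+.. code-block:: console
+
+   $ spack module rm libelf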
+
+.. note::
+ We care about your module files!
+ Every modification to module files that already exist will ask for
+ confirmation by default. If the command is used in a script, you can
+ pass the ``-y`` argument to skip this safety measure.
diff --git a/lib/spack/docs/packaging_guide.rst b/lib/spack/docs/packaging_guide.rst
index 650e0ee3b2..8a39ee28e2 100644
--- a/lib/spack/docs/packaging_guide.rst
+++ b/lib/spack/docs/packaging_guide.rst
@@ -1,7 +1,8 @@
.. _packaging-guide:
+===============
Packaging Guide
-=====================
+===============
This guide is intended for developers or administrators who want to
package software so that Spack can install it. It assumes that you
@@ -11,12 +12,12 @@ have at least some familiarity with Python, and that you've read the
There are two key parts of Spack:
- #. **Specs**: expressions for describing builds of software, and
- #. **Packages**: Python modules that describe how to build
- software according to a spec.
+#. **Specs**: expressions for describing builds of software, and
+#. **Packages**: Python modules that describe how to build
+ software according to a spec.
Specs allow a user to describe a *particular* build in a way that a
-package author can understand. Packages allow a the packager to
+package author can understand. Packages allow the packager to
encapsulate the build logic for different versions, compilers,
options, platforms, and dependency combinations in one place.
Essentially, a package translates a spec into build logic.
@@ -28,249 +29,242 @@ ubiquitous in the scientific software community. Second, it's a modern
language and has many powerful features to help make package writing
easy.
+---------------------------
Creating & editing packages
-----------------------------------
+---------------------------
-.. _spack-create:
+.. _cmd-spack-create:
+^^^^^^^^^^^^^^^^
``spack create``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^
-The ``spack create`` command generates a boilerplate package template
-from a URL. The URL should point to a tarball or other software
-archive. In most cases, ``spack create`` plus a few modifications is
+The ``spack create`` command creates a directory with the package name and
+generates a ``package.py`` file with a boilerplate package template. If given
+a URL pointing to a tarball or other software archive, ``spack create`` is
+smart enough to determine basic information about the package, including its name
+and build system. In most cases, ``spack create`` plus a few modifications is
all you need to get a package working.
Here's an example:
-.. code-block:: sh
+.. code-block:: console
- $ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
+ $ spack create https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
-Spack examines the tarball URL and tries to figure out the name of the
-package to be created. It also tries to determine what version strings
-look like for this package. Using this information, it will try to
-find *additional* versions by spidering the package's webpage. If it
-finds multiple versions, Spack prompts you to tell it how many
-versions you want to download and checksum:
+Spack examines the tarball URL and tries to figure out the name of the package
+to be created. If the name contains uppercase letters, these are automatically
+converted to lowercase. If the name contains underscores or periods, these are
+automatically converted to dashes.
-.. code-block:: sh
+Spack also searches for *additional* versions located in the same directory on
+the website. Spack tells you how many versions it found and asks
+you how many you would like to download and checksum:
+
+.. code-block:: console
- $ spack create http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
- ==> This looks like a URL for cmake version 2.8.12.1.
- ==> Creating template for package cmake
- ==> Found 18 versions of cmake.
- 2.8.12.1 http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
- 2.8.12 http://www.cmake.org/files/v2.8/cmake-2.8.12.tar.gz
- 2.8.11.2 http://www.cmake.org/files/v2.8/cmake-2.8.11.2.tar.gz
+ $ spack create https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
+ ==> This looks like a URL for gmp
+ ==> Found 16 versions of gmp:
+
+ 6.1.2 https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
+ 6.1.1 https://gmplib.org/download/gmp/gmp-6.1.1.tar.bz2
+ 6.1.0 https://gmplib.org/download/gmp/gmp-6.1.0.tar.bz2
...
- 2.8.0 http://www.cmake.org/files/v2.8/cmake-2.8.0.tar.gz
+ 5.0.0 https://gmplib.org/download/gmp/gmp-5.0.0.tar.bz2
- Include how many checksums in the package file? (default is 5, q to abort)
+ How many would you like to checksum? (default is 1, q to abort)
Spack will automatically download the number of tarballs you specify
(starting with the most recent) and checksum each of them.
You do not *have* to download all of the versions up front. You can
always choose to download just one tarball initially, and run
-:ref:`spack checksum <spack-checksum>` later if you need more.
-
-.. note::
-
- If ``spack create`` fails to detect the package name correctly,
- you can try supplying it yourself, e.g.::
-
- $ spack create --name cmake http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
-
- If it fails entirely, you can get minimal boilerplate by using
- :ref:`spack-edit-f`, or you can manually create a directory and
- ``package.py`` file for the package in ``var/spack/repos/builtin/packages``.
-
-.. note::
-
- Spack can fetch packages from source code repositories, but,
- ``spack create`` will *not* currently create a boilerplate package
- from a repository URL. You will need to use :ref:`spack-edit-f`
- and manually edit the ``version()`` directives to fetch from a
- repo. See :ref:`vcs-fetch` for details.
+:ref:`cmd-spack-checksum` later if you need more versions.
Let's say you download 3 tarballs:
-.. code-block:: sh
-
- Include how many checksums in the package file? (default is 5, q to abort) 3
- ==> Downloading...
- ==> Fetching http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz
- ###################################################################### 98.6%
- ==> Fetching http://www.cmake.org/files/v2.8/cmake-2.8.12.tar.gz
- ##################################################################### 96.7%
- ==> Fetching http://www.cmake.org/files/v2.8/cmake-2.8.11.2.tar.gz
- #################################################################### 95.2%
-
-Now Spack generates boilerplate code and opens a new ``package.py``
-file in your favorite ``$EDITOR``:
+.. code-block:: console
+
+ How many would you like to checksum? (default is 1, q to abort) 3
+ ==> Downloading...
+ ==> Fetching https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
+ ######################################################################## 100.0%
+ ==> Fetching https://gmplib.org/download/gmp/gmp-6.1.1.tar.bz2
+ ######################################################################## 100.0%
+ ==> Fetching https://gmplib.org/download/gmp/gmp-6.1.0.tar.bz2
+ ######################################################################## 100.0%
+ ==> Checksummed 3 versions of gmp:
+ ==> This package looks like it uses the autotools build system
+ ==> Created template for gmp package
+ ==> Created package file: /Users/Adam/spack/var/spack/repos/builtin/packages/gmp/package.py
+
+Spack automatically creates a directory in the appropriate repository,
+generates a boilerplate template for your package, and opens up the new
+``package.py`` in your favorite ``$EDITOR``:
.. code-block:: python
:linenos:
- # FIXME:
- # This is a template package file for Spack. We've conveniently
- # put "FIXME" labels next to all the things you'll want to change.
#
- # Once you've edited all the FIXME's, delete this whole message,
- # save this file, and test out your package like this:
+ # This is a template package file for Spack. We've put "FIXME"
+ # next to all the things you'll want to change. Once you've handled
+ # them, you can save this file and test your package like this:
#
- # spack install cmake
+ # spack install gmp
#
- # You can always get back here to change things with:
+ # You can edit this file again by typing:
#
- # spack edit cmake
+ # spack edit gmp
#
- # See the spack documentation for more information on building
- # packages.
+ # See the Spack documentation for more information on packaging.
+ # If you submit this package back to Spack as a pull request,
+ # please first remove this boilerplate and all FIXME comments.
#
from spack import *
- class Cmake(Package):
- """FIXME: put a proper description of your package here."""
- # FIXME: add a proper url for your package's homepage here.
- homepage = "http://www.example.com"
- url = "http://www.cmake.org/files/v2.8/cmake-2.8.12.1.tar.gz"
-
- version('2.8.12.1', '9d38cd4e2c94c3cea97d0e2924814acc')
- version('2.8.12', '105bc6d21cc2e9b6aff901e43c53afea')
- version('2.8.11.2', '6f5d7b8e7534a5d9e1a7664ba63cf882')
- # FIXME: Add dependencies if this package requires them.
- # depends_on("foo")
+ class Gmp(AutotoolsPackage):
+ """FIXME: Put a proper description of your package here."""
- def install(self, spec, prefix):
- # FIXME: Modify the configure line to suit your build system here.
- configure("--prefix=" + prefix)
+ # FIXME: Add a proper url for your package's homepage here.
+ homepage = "http://www.example.com"
+ url = "https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2"
- # FIXME: Add logic to build and install here
- make()
- make("install")
+ version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5')
+ version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d')
+ version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048')
-The tedious stuff (creating the class, checksumming archives) has been
-done for you.
+ # FIXME: Add dependencies if required.
+ # depends_on('foo')
-In the generated package, the download ``url`` attribute is already
-set. All the things you still need to change are marked with
-``FIXME`` labels. The first ``FIXME`` refers to the commented
-instructions at the top of the file. You can delete these
-instructions after reading them. The rest of them are as follows:
+ def configure_args(self):
+ # FIXME: Add arguments other than --prefix
+ # FIXME: If not needed delete the function
+ args = []
+ return args
- #. Add a description.
+The tedious stuff (creating the class, checksumming archives) has been
+done for you. You'll notice that ``spack create`` correctly detected that
+``gmp`` uses the Autotools build system. It created a new ``Gmp`` package
+that subclasses the ``AutotoolsPackage`` base class. This base class
+provides basic installation methods common to all Autotools packages:
- Immediately inside the package class is a *docstring* in
- triple-quotes (``"""``). It's used to generate the description
- shown when users run ``spack info``.
+.. code-block:: bash
- #. Change the ``homepage`` to a useful URL.
+ ./configure --prefix=/path/to/installation/directory
- The ``homepage`` is displayed when users run ``spack info`` so
- that they can learn about packages.
+ make
+ make check
+ make install
- #. Add ``depends_on()`` calls for the package's dependencies.
+For most Autotools packages, this is sufficient. If you need to add
+additional arguments to the ``./configure`` call, add them via the
+``configure_args`` function.
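+
+For instance, a purely illustrative ``configure_args`` override (the flags
+below are made up; check ``./configure --help`` for your package's real
+options) might look like:
+
+.. code-block:: python
+
+   def configure_args(self):
+       # Hypothetical options passed to ./configure after --prefix.
+       return ['--enable-shared', '--disable-static']
+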
- ``depends_on`` tells Spack that other packages need to be built
- and installed before this one. See `dependencies_`.
+In the generated package, the download ``url`` attribute is already
+set. All the things you still need to change are marked with
+``FIXME`` labels. You can delete the commented instructions between
+the license and the first import statement after reading them.
+The rest of the tasks you need to do are as follows:
- #. Get the ``install()`` method working.
+#. Add a description.
- The ``install()`` method implements the logic to build a
- package. The code should look familiar; it is designed to look
- like a shell script. Specifics will differ depending on the package,
- and :ref:`implementing the install method <install-method>` is
- covered in detail later.
+ Immediately inside the package class is a *docstring* in
+ triple-quotes (``"""``). It is used to generate the description
+ shown when users run ``spack info``.
-Before going into details, we'll cover a few more basics.
+#. Change the ``homepage`` to a useful URL.
-.. _spack-edit:
+ The ``homepage`` is displayed when users run ``spack info`` so
+ that they can learn more about your package.
-``spack edit``
-~~~~~~~~~~~~~~~~~~~~
+#. Add ``depends_on()`` calls for the package's dependencies.
-One of the easiest ways to learn to write packages is to look at
-existing ones. You can edit a package file by name with the ``spack
-edit`` command:
+ ``depends_on`` tells Spack that other packages need to be built
+ and installed before this one. See :ref:`dependencies`.
-.. code-block:: sh
+#. Get the installation working.
- spack edit cmake
+ Your new package may require specific flags during ``configure``.
+ These can be added via ``configure_args``. Specifics will differ
+ depending on the package and its build system.
+ :ref:`Implementing the install method <install-method>` is
+ covered in detail later.
-So, if you used ``spack create`` to create a package, then saved and
-closed the resulting file, you can get back to it with ``spack edit``.
-The ``cmake`` package actually lives in
-``$SPACK_ROOT/var/spack/repos/builtin/packages/cmake/package.py``, but this provides
-a much simpler shortcut and saves you the trouble of typing the full
-path.
+Passing a URL to ``spack create`` is a convenient and easy way to get
+a basic package template, but what if your software is licensed and
+cannot be downloaded from a URL? You can still create a boilerplate
+``package.py`` by telling ``spack create`` what name you want to use:
-If you try to edit a package that doesn't exist, Spack will recommend
-using ``spack create`` or ``spack edit -f``:
+.. code-block:: console
-.. code-block:: sh
+ $ spack create --name intel
- $ spack edit foo
- ==> Error: No package 'foo'. Use spack create, or supply -f/--force to edit a new file.
+This will create a simple ``intel`` package with an ``install()``
+method that you can craft to install your package.
-.. _spack-edit-f:
+What if ``spack create <url>`` guessed the wrong name or build system?
+For example, if your package uses the Autotools build system but does
+not come with a ``configure`` script, Spack won't realize it uses
+Autotools. You can overwrite the old package with ``--force`` and specify
+a name with ``--name`` or a build system template to use with ``--template``:
-``spack edit -f``
-~~~~~~~~~~~~~~~~~~~~
+.. code-block:: console
-``spack edit -f`` can be used to create a new, minimal boilerplate
-package:
+ $ spack create --name gmp https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
+ $ spack create --force --template autotools https://gmplib.org/download/gmp/gmp-6.1.2.tar.bz2
-.. code-block:: sh
+.. note::
- $ spack edit -f foo
+ If you are creating a package that uses the Autotools build system
+ but does not come with a ``configure`` script, you'll need to add an
+ ``autoreconf`` method to your package that explains how to generate
+ the ``configure`` script. You may also need the following dependencies:
-Unlike ``spack create``, which infers names and versions, and which
-actually downloads the tarball and checksums it for you, ``spack edit
--f`` has no such fanciness. It will substitute dummy values for you
-to fill in yourself:
+ .. code-block:: python
-.. code-block:: python
- :linenos:
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('m4', type='build')
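+
+   As a rough sketch (assuming a plain ``autoreconf`` run is sufficient;
+   many packages instead ship an ``autogen.sh`` or ``bootstrap`` script),
+   such an ``autoreconf`` method might look like:
+
+   .. code-block:: python
+
+      # ``which`` locates an executable on the PATH; import it explicitly
+      # if it is not already in scope in your package file.
+      from spack.util.executable import which
+
+      def autoreconf(self, spec, prefix):
+          # Regenerate the configure script before the usual
+          # configure/build/install steps run.
+          autoreconf = which('autoreconf')
+          autoreconf('--install', '--verbose', '--force')
+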
- from spack import *
+A complete list of available build system templates can be found by running
+``spack create --help``.
- class Foo(Package):
- """Description"""
+.. _cmd-spack-edit:
- homepage = "http://www.example.com"
- url = "http://www.example.com/foo-1.0.tar.gz"
+^^^^^^^^^^^^^^
+``spack edit``
+^^^^^^^^^^^^^^
- version('1.0', '0123456789abcdef0123456789abcdef')
+One of the easiest ways to learn how to write packages is to look at
+existing ones. You can edit a package file by name with the ``spack
+edit`` command:
- def install(self, spec, prefix):
- configure("--prefix=" + prefix)
- make()
- make("install")
+.. code-block:: console
-This is useful when ``spack create`` cannot figure out the name and
-version of your package from the archive URL.
+ $ spack edit gmp
+So, if you used ``spack create`` to create a package, then saved and
+closed the resulting file, you can get back to it with ``spack edit``.
+The ``gmp`` package actually lives in
+``$SPACK_ROOT/var/spack/repos/builtin/packages/gmp/package.py``,
+but ``spack edit`` provides a much simpler shortcut and saves you the
+trouble of typing the full path.
+----------------------------
Naming & directory structure
---------------------------------------
-
-.. note::
-
- Spack's default naming and directory structure will change in
- version 0.9. Specifically, 0.9 will stop using directory names
- with special characters like ``@``, to avoid interfering with
- certain packages that do not handle this well.
+----------------------------
This section describes how packages need to be named, and where they
-live in Spack's directory structure. In general, `spack-create`_ and
-`spack-edit`_ handle creating package files for you, so you can skip
-most of the details here.
+live in Spack's directory structure. In general, :ref:`cmd-spack-create`
+handles creating package files for you, so you can skip most of the
+details here.
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``var/spack/repos/builtin/packages``
-~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
A Spack installation directory is structured like a standard UNIX
install prefix (``bin``, ``lib``, ``include``, ``var``, ``opt``,
@@ -280,27 +274,29 @@ Packages themselves live in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
If you ``cd`` to that directory, you will see directories for each
package:
-.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages; ls -CF
+.. command-output:: cd $SPACK_ROOT/var/spack/repos/builtin/packages && ls
:shell:
:ellipsis: 10
Each directory contains a file called ``package.py``, which is where
all the python code for the package goes. For example, the ``libelf``
-package lives in::
+package lives in:
+
+.. code-block:: none
$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py
Alongside the ``package.py`` file, a package may contain extra
directories or files (like patches) that it needs to build.
-
+^^^^^^^^^^^^^
Package Names
-~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^
-Packages are named after the directory containing ``package.py``. So,
+Packages are named after the directory containing ``package.py``. So,
``libelf``'s ``package.py`` lives in a directory called ``libelf``.
The ``package.py`` file defines a class called ``Libelf``, which
-extends Spack's ``Package`` class. for example, here is
+extends Spack's ``Package`` class. For example, here is
``$SPACK_ROOT/var/spack/repos/builtin/packages/libelf/package.py``:
.. code-block:: python
@@ -322,49 +318,176 @@ The **directory name** (``libelf``) determines the package name that
users should provide on the command line. e.g., if you type any of
these:
-.. code-block:: sh
+.. code-block:: console
- $ spack install libelf
+ $ spack info libelf
+ $ spack versions libelf
$ spack install libelf@0.8.13
Spack sees the package name in the spec and looks for
-``libelf/package.py`` in ``var/spack/repos/builtin/packages``. Likewise, if you say
-``spack install py-numpy``, then Spack looks for
+``libelf/package.py`` in ``var/spack/repos/builtin/packages``.
+Likewise, if you run ``spack install py-numpy``, Spack looks for
``py-numpy/package.py``.
Spack uses the directory name as the package name in order to give
-packagers more freedom in naming their packages. Package names can
-contain letters, numbers, dashes, and underscores. Using a Python
-identifier (e.g., a class name or a module name) would make it
-difficult to support these options. So, you can name a package
-``3proxy`` or ``_foo`` and Spack won't care. It just needs to see
-that name in the package spec.
-
+packagers more freedom in naming their packages. Package names can
+contain letters, numbers, and dashes. Using a Python identifier
+(e.g., a class name or a module name) would make it difficult to
+support these options. So, you can name a package ``3proxy`` or
+``foo-bar`` and Spack won't care. It just needs to see that name
+in the packages directory.
+
+^^^^^^^^^^^^^^^^^^^
Package class names
-~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^
Spack loads ``package.py`` files dynamically, and it needs to find a
special class name in the file for the load to succeed. The **class
name** (``Libelf`` in our example) is formed by converting words
-separated by `-` or ``_`` in the file name to camel case. If the name
+separated by ``-`` in the file name to CamelCase. If the name
starts with a number, we prefix the class name with ``_``. Here are
some examples:
================= =================
Module Name Class Name
================= =================
- ``foo_bar`` ``FooBar``
- ``docbook-xml`` ``DocbookXml``
- ``FooBar`` ``Foobar``
+ ``foo-bar`` ``FooBar``
``3proxy`` ``_3proxy``
================= =================
In general, you won't have to remember this naming convention because
-`spack-create`_ and `spack-edit`_ handle the details for you.
+:ref:`cmd-spack-create` and :ref:`cmd-spack-edit` handle the details for you.
+
+-----------------
+Trusted Downloads
+-----------------
+
+Spack verifies that the source code it downloads is not corrupted or
+compromised; or at least, that it is the same version the author of
+the Spack package saw when the package was created. If Spack uses a
+download method it can verify, we say the download method is
+*trusted*. Trust is important for *all downloads*: Spack
+has no control over the security of the various sites from which it
+downloads source code, and can never assume that any particular site
+hasn't been compromised.
+
+Trust is established in different ways for different download methods.
+For the most common download method --- a single-file tarball --- the
+tarball is checksummed. Git downloads using ``commit=`` are trusted
+implicitly, as long as a hash is specified.
+
+Spack also provides untrusted download methods: tarball URLs may be
+supplied without a checksum, or Git downloads may specify a branch or
+tag instead of a hash. If the user does not control or trust the
+source of an untrusted download, it is a security risk. Unless otherwise
+specified by the user for special cases, Spack should by default use
+*only* trusted download methods.
+
+Unfortunately, Spack does not currently provide that guarantee. It
+does provide the following mechanisms for safety:
+
+#. By default, Spack will only install a tarball package if it has a
+ checksum and that checksum matches. You can override this with
+ ``spack install --no-checksum``.
+
+#. Numeric versions are almost always tarball downloads, whereas
+ non-numeric versions not named ``develop`` frequently download
+ untrusted branches or tags from a version control system. As long
+ as a package has at least one numeric version, and no non-numeric
+ version named ``develop``, Spack will prefer it over any
+ non-numeric versions.
+
+^^^^^^^^^
+Checksums
+^^^^^^^^^
+
+For tarball downloads, Spack can currently support checksums using the
+MD5, SHA-1, SHA-224, SHA-256, SHA-384, and SHA-512 algorithms. It
+determines the algorithm to use based on the hash length.
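+
+For example, assuming the checksum is passed to ``version()`` as shown
+elsewhere in this guide (the digests below are obvious placeholders), the
+length of the hex string selects the algorithm:
+
+.. code-block:: python
+
+   # 32 hex characters: verified as MD5
+   version('1.0', '0123456789abcdef0123456789abcdef')
+
+   # 64 hex characters: verified as SHA-256
+   version('1.1', '0123456789abcdef0123456789abcdef'
+                  '0123456789abcdef0123456789abcdef')
+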
+-----------------------
+Package Version Numbers
+-----------------------
+Most Spack versions are numeric, a tuple of integers; for example,
+``apex@0.1``, ``ferret@6.96`` or ``py-netcdf@1.2.3.1``. Spack knows
+how to compare and sort numeric versions.
+
+Some Spack versions involve slight extensions of numeric syntax; for
+example, ``py-sphinx-rtd-theme@0.1.10a0``. In this case, numbers are
+always considered to be "newer" than letters. This is for consistency
+with `RPM <https://bugzilla.redhat.com/show_bug.cgi?id=50977>`_.
+
+Spack versions may also be arbitrary non-numeric strings, such as
+``@develop``, ``@master``, or ``@local``.
+The following rules determine the sort order of numeric
+vs. non-numeric versions:
+
+#. The non-numeric version ``@develop`` is considered the greatest (newest).
+
+#. Numeric versions are all less than the ``@develop`` version, and are
+   sorted numerically.
+
+#. All other non-numeric versions are less than numeric versions, and
+ are sorted alphabetically.
+
+The logic behind this sort order is two-fold:
+
+#. Non-numeric versions are usually used for special cases while
+ developing or debugging a piece of software. Keeping most of them
+   less than numeric versions ensures that Spack chooses numeric
+ versions by default whenever possible.
+
+#. The most-recent development version of a package will usually be
+ newer than any released numeric versions. This allows the
+   ``develop`` version to satisfy dependencies like
+   ``depends_on("abc", when="@x.y.z:")``.
+
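+To make the ordering concrete, here is a small illustrative snippet (not
+part of any package file) that compares versions with Spack's ``Version``
+class; if the rules above hold, each comparison evaluates to ``True``:
+
+.. code-block:: python
+
+   from spack.version import Version
+
+   Version('develop') > Version('6.1.2')   # develop is newest
+   Version('6.1.2') > Version('6.1.0')     # numeric versions sort numerically
+   Version('6.1.0') > Version('master')    # other strings sort below numbers
+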
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Concretization Version Selection
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+When concretizing, many versions might match a user-supplied spec.
+For example, the spec ``python`` matches all available versions of the
+package ``python``. Similarly, ``python@3:`` matches all versions of
+Python3. Given a set of versions that match a spec, Spack
+concretization uses the following priorities to decide which one to
+use:
+
+#. If the user provided a list of versions in ``packages.yaml``, the
+ first matching version in that list will be used.
+
+#. If one or more versions are specified as ``preferred=True``, in
+   either ``packages.yaml`` or ``package.py``, the largest matching
+   version will be used ("largest" follows the sort order defined
+   above; a short ``package.py`` example appears after this list).
+
+#. If no particular preferences are specified in the package or in
+   ``packages.yaml``, then the largest matching non-develop version
+   will be used. Avoiding ``@develop`` here keeps users from
+   accidentally installing a development version.
+
+#. If all else fails and ``@develop`` is the only matching version, it
+ will be used.
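+
+As a brief illustration of the second rule, a package author can mark a
+version as preferred directly in ``package.py`` (the ``9.0`` version and its
+checksum below are placeholders):
+
+.. code-block:: python
+
+   class Foo(Package):
+       ...
+       # Although 9.0 is newer, the concretizer picks 8.2.1 by default
+       # because it is marked as preferred.
+       version('9.0',   '0123456789abcdef0123456789abcdef')
+       version('8.2.1', '4136d7b4c04df68b686570afa26988ac', preferred=True)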
+
+^^^^^^^^^^^^^
+Date Versions
+^^^^^^^^^^^^^
+
+If you wish to use dates as versions, it is best to use the format
+``@date-yyyy-mm-dd``. This will ensure they sort in the correct
+order. If you want your date versions to be numeric (assuming they
+don't conflict with other numeric versions), you can use just
+``yyyy.mm.dd``.
+
+Alternately, you might use a hybrid release-version / date scheme.
+For example, ``@1.3.2016.08.31`` would mean the version from the
+``1.3`` branch, as of August 31, 2016.
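+
+For example (the checksums below are placeholders), a package might declare:
+
+.. code-block:: python
+
+   # Non-numeric date version (sorts below numeric versions):
+   version('date-2016-08-31', '0123456789abcdef0123456789abcdef')
+
+   # Numeric date version (sorts with other numeric versions):
+   version('2016.08.31', '0123456789abcdef0123456789abcdef')
+
+   # Hybrid release/date version from the 1.3 branch:
+   version('1.3.2016.08.31', '0123456789abcdef0123456789abcdef')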
+
+
+-------------------
Adding new versions
-------------------------
+-------------------
The most straightforward way to add new versions to your package is to
add a line like this in the package class:
@@ -377,32 +500,75 @@ add a line like this in the package class:
version('8.2.1', '4136d7b4c04df68b686570afa26988ac')
...
+Versions should be listed with the newest version first.
+
+^^^^^^^^^^^^
Version URLs
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^
By default, each version's URL is extrapolated from the ``url`` field
in the package. For example, Spack is smart enough to download
version ``8.2.1`` of the ``Foo`` package above from
``http://example.com/foo-8.2.1.tar.gz``.
-If spack *cannot* extrapolate the URL from the ``url`` field, or if
-the package doesn't have a ``url`` field, you can add a URL explicitly
-for a particular version:
+If the URL is particularly complicated or changes based on the release,
+you can override the default URL generation algorithm by defining your
+own ``url_for_version()`` function. For example, the developers of HDF5
+keep changing the archive layout, so the ``url_for_version()`` function
+looks like:
+
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/hdf5/package.py
+ :pyobject: Hdf5.url_for_version
+
+With the use of this ``url_for_version()``, Spack knows to download HDF5 ``1.8.16``
+from ``http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.8.16/src/hdf5-1.8.16.tar.gz``
+but download HDF5 ``1.10.0`` from ``http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-1.10/hdf5-1.10.0/src/hdf5-1.10.0.tar.gz``.
+
+You'll notice that HDF5's ``url_for_version()`` function makes use of a special
+``Version`` function called ``up_to()``. When you call ``version.up_to(2)`` on a
+version like ``1.10.0``, it returns ``1.10``. ``version.up_to(1)`` would return
+``1``. This can be very useful for packages that keep all ``X.Y.*`` releases
+under a single ``X.Y`` directory, with each ``X.Y.Z`` version in its own subdirectory.
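+
+As an illustrative sketch (this is *not* HDF5's actual function, and the URL
+is made up), a package whose tarballs are grouped by minor release might
+define:
+
+.. code-block:: python
+
+   def url_for_version(self, version):
+       # e.g. 1.10.0 -> http://example.com/releases/foo-1.10/foo-1.10.0.tar.gz
+       base = 'http://example.com/releases'
+       return '{0}/foo-{1}/foo-{2}.tar.gz'.format(base, version.up_to(2), version)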
+
+There are a few ``Version`` properties you should be aware of. We generally
+prefer numeric versions to be separated by dots for uniformity, but not all
+tarballs are named that way. For example, ``icu4c`` separates its major and minor
+versions with underscores, like ``icu4c-57_1-src.tgz``. The value ``57_1`` can be
+obtained with the ``version.underscored`` property. There are other separator
+properties as well:
+
+=================== ======
+Property Result
+=================== ======
+version.dotted 1.2.3
+version.dashed 1-2-3
+version.underscored 1_2_3
+version.joined 123
+=================== ======
+
+.. note::
+
+ Python properties don't need parentheses. ``version.dashed`` is correct.
+ ``version.dashed()`` is incorrect.
+
+If a URL cannot be derived systematically, or there is a special URL for one
+of its versions, you can add an explicit URL for a particular version:
.. code-block:: python
version('8.2.1', '4136d7b4c04df68b686570afa26988ac',
url='http://example.com/foo-8.2.1-special-version.tar.gz')
-For the URL above, you might have to add an explicit URL because the
-version can't simply be substituted in the original ``url`` to
-construct the new one for ``8.2.1``.
+This is common for Python packages that download from PyPI. Since newer
+download URLs often contain a unique hash for each version, there is no
+way to guess the URL systematically.
When you supply a custom URL for a version, Spack uses that URL
*verbatim* and does not perform extrapolation.
+^^^^^^^^^^^^^^^^^^^^^^^^
Skipping the expand step
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^
Spack normally expands archives automatically after downloading
them. If you want to skip this step (e.g., for self-extracting
@@ -412,7 +578,7 @@ executables and other custom archive types), you can add
.. code-block:: python
version('8.2.1', '4136d7b4c04df68b686570afa26988ac',
- url='http://example.com/foo-8.2.1-special-version.tar.gz', 'expand=False')
+ url='http://example.com/foo-8.2.1-special-version.tar.gz', expand=False)
When ``expand`` is set to ``False``, Spack sets the current working
directory to the directory containing the downloaded archive before it
@@ -430,46 +596,47 @@ it executable, then runs it with some arguments.
installer = Executable(self.stage.archive_file)
installer('--prefix=%s' % prefix, 'arg1', 'arg2', 'etc.')
-Checksums
-~~~~~~~~~~~~~~~~~
-Spack uses a checksum to ensure that the downloaded package version is
-not corrupted or compromised. This is especially important when
-fetching from insecure sources, like unencrypted http. By default, a
-package will *not* be installed if it doesn't pass a checksum test
-(though you can override this with ``spack install --no-checksum``).
+^^^^^^^^^^^^^
+``spack md5``
+^^^^^^^^^^^^^
-Spack can currently support checksums using the MD5, SHA-1, SHA-224,
-SHA-256, SHA-384, and SHA-512 algorithms. It determines the algorithm
-to use based on the hash length.
+If you have one or more files to checksum, you can use the ``spack md5``
+command to do it:
-``spack md5``
-^^^^^^^^^^^^^^^^^^^^^^
+.. code-block:: console
-If you have a single file to checksum, you can use the ``spack md5``
-command to do it. Here's how you might download an archive and get a
-checksum for it:
+ $ spack md5 foo-8.2.1.tar.gz foo-8.2.2.tar.gz
+ ==> 2 MD5 checksums:
+ 4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
+ 1586b70a49dfe05da5fcc29ef239dce0 foo-8.2.2.tar.gz
-.. code-block:: sh
+``spack md5`` also accepts one or more URLs and automatically downloads
+the files for you:
- $ curl -O http://exmaple.com/foo-8.2.1.tar.gz'
- $ spack md5 foo-8.2.1.tar.gz
+.. code-block:: console
+
+ $ spack md5 http://example.com/foo-8.2.1.tar.gz
+ ==> Trying to fetch from http://example.com/foo-8.2.1.tar.gz
+ ######################################################################## 100.0%
+ ==> 1 MD5 checksum:
4136d7b4c04df68b686570afa26988ac foo-8.2.1.tar.gz
Doing this for lots of files, or whenever a new package version is
released, is tedious. See ``spack checksum`` below for an automated
version of this process.
-.. _spack-checksum:
+.. _cmd-spack-checksum:
+^^^^^^^^^^^^^^^^^^
``spack checksum``
-^^^^^^^^^^^^^^^^^^^^^^
+^^^^^^^^^^^^^^^^^^
If you want to add new versions to a package you've already created,
this is automated with the ``spack checksum`` command. Here's an
example for ``libelf``:
-.. code-block:: sh
+.. code-block:: console
$ spack checksum libelf
==> Found 16 versions of libelf.
@@ -493,7 +660,7 @@ they're released). It fetches the tarballs you ask for and prints out
a list of ``version`` commands ready to copy/paste into your package
file:
-.. code-block:: sh
+.. code-block:: console
==> Checksummed new versions of libelf:
version('0.8.13', '4136d7b4c04df68b686570afa26988ac')
@@ -526,8 +693,9 @@ versions. See the documentation on `attribute_list_url`_ and
.. _vcs-fetch:
+------------------------------
Fetching from VCS repositories
---------------------------------------
+------------------------------
For some packages, source code is provided in a Version Control System
(VCS) repository rather than in a tarball. Spack can fetch packages
@@ -540,38 +708,19 @@ call to your package with parameters indicating the repository URL and
any branch, tag, or revision to fetch. See below for the parameters
you'll need for each VCS system.
-Repositories and versions
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-
-The package author is responsible for coming up with a sensible name
-for each version to be fetched from a repository. For example, if
-you're fetching from a tag like ``v1.0``, you might call that ``1.0``.
-If you're fetching a nameless git commit or an older subversion
-revision, you might give the commit an intuitive name, like ``dev``
-for a development version, or ``some-fancy-new-feature`` if you want
-to be more specific.
-
-In general, it's recommended to fetch tags or particular
-commits/revisions, NOT branches or the repository mainline, as
-branches move forward over time and you aren't guaranteed to get the
-same thing every time you fetch a particular version. Life isn't
-always simple, though, so this is not strictly enforced.
-
-In some future release, Spack may support extrapolating repository
-versions as it does for tarball URLs, but currently this is not
-supported.
-
.. _git-fetch:
+^^^
Git
-~~~~~~~~~~~~~~~~~~~~
+^^^
Git fetching is enabled with the following parameters to ``version``:
- * ``git``: URL of the git repository.
- * ``tag``: name of a tag to fetch.
- * ``branch``: name of a branch to fetch.
- * ``commit``: SHA hash (or prefix) of a commit to fetch.
+* ``git``: URL of the git repository.
+* ``tag``: name of a tag to fetch.
+* ``branch``: name of a branch to fetch.
+* ``commit``: SHA hash (or prefix) of a commit to fetch.
+* ``submodules``: Also fetch submodules when checking out this repository.
Only one of ``tag``, ``branch``, or ``commit`` can be used at a time.
@@ -582,10 +731,9 @@ Default branch
class Example(Package):
...
- version('dev', git='https://github.com/example-project/example.git')
+ version('develop', git='https://github.com/example-project/example.git')
- This is not recommended, as the contents of the default branch
- change over time.
+ This download method is untrusted, and is not recommended.
Tags
To fetch from a particular tag, use the ``tag`` parameter along with
@@ -596,6 +744,8 @@ Tags
version('1.0.1', git='https://github.com/example-project/example.git',
tag='v1.0.1')
+ This download method is untrusted, and is not recommended.
+
Branches
To fetch a particular branch, use ``branch`` instead:
@@ -604,8 +754,7 @@ Branches
version('experimental', git='https://github.com/example-project/example.git',
branch='experimental')
- This is not recommended, as the contents of branches change over
- time.
+ This download method is untrusted, and is not recommended.
Commits
Finally, to fetch a particular commit, use ``commit``:
@@ -615,7 +764,7 @@ Commits
version('2014-10-08', git='https://github.com/example-project/example.git',
commit='9d38cd4e2c94c3cea97d0e2924814acc')
- This doesn't have to be a full hash; You can abbreviate it as you'd
+ This doesn't have to be a full hash; you can abbreviate it as you'd
expect with git:
.. code-block:: python
@@ -623,29 +772,48 @@ Commits
version('2014-10-08', git='https://github.com/example-project/example.git',
commit='9d38cd')
+ This download method *is trusted*. It is the recommended way to
+ securely download from a Git repository.
+
It may be useful to provide a saner version for commits like this,
e.g. you might use the date as the version, as done above. Or you
could just use the abbreviated commit hash. It's up to the package
author to decide what makes the most sense.
-Installing
-^^^^^^^^^^^^^^
+Submodules
-You can fetch and install any of the versions above as you'd expect,
-by using ``@<version>`` in a spec:
+ You can supply ``submodules=True`` to cause Spack to fetch submodules
+ along with the repository at fetch time.
-.. code-block:: sh
+ .. code-block:: python
+
+ version('1.0.1', git='https://github.com/example-project/example.git',
+           tag='v1.0.1', submodules=True)
- spack install example@2014-10-08
-Git and other VCS versions will show up in the list of versions when
-a user runs ``spack info <package name>``.
+.. _github-fetch:
+""""""
+GitHub
+""""""
+
+If a project is hosted on GitHub, *any* valid Git branch, tag, or hash
+may be downloaded as a tarball. This is accomplished simply by
+constructing an appropriate URL. Spack can checksum any package
+downloaded this way, thereby producing a trusted download. For
+example, the following downloads a particular commit and verifies it
+against a checksum.
+
+.. code-block:: python
+
+ version('1.9.5.1.1', 'd035e4bc704d136db79b43ab371b27d2',
+ url='https://www.github.com/jswhit/pyproj/tarball/0be612cc9f972e38b50a90c946a9b353e2ab140f')
.. _hg-fetch:
+^^^^^^^^^
Mercurial
-~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^
Fetching with Mercurial works much like :ref:`Git <git-fetch>`, but you
use the ``hg`` parameter.
@@ -655,10 +823,9 @@ Default
.. code-block:: python
- version('hg-head', hg='https://jay.grs.rwth-aachen.de/hg/example')
+ version('develop', hg='https://jay.grs.rwth-aachen.de/hg/example')
- Note that this is not recommended; try to fetch a particular
- revision instead.
+ This download method is untrusted, and is not recommended.
Revisions
Add ``hg`` and ``revision`` parameters:
@@ -668,6 +835,8 @@ Revisions
version('1.0', hg='https://jay.grs.rwth-aachen.de/hg/example',
revision='v1.0')
+ This download method is untrusted, and is not recommended.
+
Unlike ``git``, which has special parameters for different types of
revisions, you can use ``revision`` for branches, tags, and commits
when you fetch with Mercurial.
@@ -677,8 +846,9 @@ example@<version>`` command-line syntax.
.. _svn-fetch:
+^^^^^^^^^^
Subversion
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^
To fetch with Subversion, use the ``svn`` and ``revision`` parameters:
@@ -687,9 +857,9 @@ Fetching the head
.. code-block:: python
- version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
+ version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk')
- This is not recommended, as the head will move forward over time.
+ This download method is untrusted, and is not recommended.
Fetching a revision
To fetch a particular revision, add a ``revision`` to the
@@ -697,51 +867,118 @@ Fetching a revision
.. code-block:: python
- version('svn-head', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
+ version('develop', svn='https://outreach.scidac.gov/svn/libmonitor/trunk',
revision=128)
+ This download method is untrusted, and is not recommended.
+
Subversion branches are handled as part of the directory structure, so
you can check out a branch or tag by changing the ``url``.
+-----------------------------------------
+Standard repositories for python packages
+-----------------------------------------
+
+In addition to their developer websites, many Python packages are hosted at the
+`Python Package Index (PyPI) <https://pypi.python.org/pypi>`_. Although links to
+these individual files are typically `generated using a hash
+<https://bitbucket.org/pypa/pypi/issues/438>`_, it is often possible to find a
+reliable link of the format:
+
+.. code-block:: sh
+
+ https://pypi.python.org/packages/source/<first letter of package>/<package>/<package>-<version>.<extension>
+
+Packages hosted on GitHub and the like are often development versions that do not
+contain all of the files (e.g. configure scripts) necessary to support
+compilation. For this reason, it is preferable to link to a repository such as
+PyPI when possible.
+
+More recently, sources are being indexed at `pypi.io <https://pypi.io>`_ as
+well. Links obtained from this site follow a similar pattern, namely
+
+.. code-block:: sh
+
+ https://pypi.io/packages/source/<first letter of package>/<package>/<package>-<version>.<extension>
+
+These links currently redirect back to `pypi.python.org
+<https://pypi.python.org>`_, but this `may change in the future
+<https://bitbucket.org/pypa/pypi/issues/438#comment-27243225>`_.
+
+-------------------------------------------------
+Expanding additional resources in the source tree
+-------------------------------------------------
+
+Some packages (most notably compilers) provide optional features if additional
+resources are expanded within their source tree before building. In Spack, such
+a need can be described with the ``resource`` directive:
+
+.. code-block:: python
+
+   resource(
+       name='cargo',
+       git='https://github.com/rust-lang/cargo.git',
+       tag='0.10.0',
+       destination='cargo'
+   )
+
+Based on the keywords present among the arguments, the appropriate ``FetchStrategy``
+will be used for the resource. The ``destination`` keyword is relative to the source
+root of the package and should point to where the resource is to be expanded.
+
+------------------------------------------------------
+Automatic caching of files fetched during installation
+------------------------------------------------------
+
+Spack maintains a cache (described :ref:`here <caching>`) which saves files
+retrieved during package installations to avoid re-downloading in the case that
+a package is installed with a different specification (but the same version) or
+reinstalled on account of a change in the hashing scheme.
.. _license:
+-----------------
Licensed software
-------------------------------------------
+-----------------
In order to install licensed software, Spack needs to know a few more
details about a package. The following class attributes should be defined.
+^^^^^^^^^^^^^^^^^^^^
``license_required``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^
Boolean. If set to ``True``, this software requires a license. If set to
``False``, all of the following attributes will be ignored. Defaults to
``False``.
+^^^^^^^^^^^^^^^^^^^
``license_comment``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^
String. Contains the symbol used by the license manager to denote a comment.
Defaults to ``#``.
+^^^^^^^^^^^^^^^^^
``license_files``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^
List of strings. These are files that the software searches for when
looking for a license. All file paths must be relative to the installation
directory. More complex packages like Intel may require multiple
licenses for individual components. Defaults to the empty list.
+^^^^^^^^^^^^^^^^
``license_vars``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^
List of strings. Environment variables that can be set to tell the software
where to look for a license if it is not in the usual location. Defaults
to the empty list.
+^^^^^^^^^^^^^^^
``license_url``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^
String. A URL pointing to license setup instructions for the software.
Defaults to the empty string.
@@ -750,12 +987,12 @@ For example, let's take a look at the package for the PGI compilers.
.. code-block:: python
- # Licensing
- license_required = True
- license_comment = '#'
- license_files = ['license.dat']
- license_vars = ['PGROUPD_LICENSE_FILE', 'LM_LICENSE_FILE']
- license_url = 'http://www.pgroup.com/doc/pgiinstall.pdf'
+ # Licensing
+ license_required = True
+ license_comment = '#'
+ license_files = ['license.dat']
+ license_vars = ['PGROUPD_LICENSE_FILE', 'LM_LICENSE_FILE']
+ license_url = 'http://www.pgroup.com/doc/pgiinstall.pdf'
As you can see, PGI requires a license. Its license manager, FlexNet, uses
the ``#`` symbol to denote a comment. It expects the license file to be
@@ -776,41 +1013,41 @@ Spack will create a global license file located at
file using the editor set in ``$EDITOR``, or vi if unset. It will look like
this:
-.. code-block::
-
- # A license is required to use pgi.
- #
- # The recommended solution is to store your license key in this global
- # license file. After installation, the following symlink(s) will be
- # added to point to this file (relative to the installation prefix):
- #
- # license.dat
- #
- # Alternatively, use one of the following environment variable(s):
- #
- # PGROUPD_LICENSE_FILE
- # LM_LICENSE_FILE
- #
- # If you choose to store your license in a non-standard location, you may
- # set one of these variable(s) to the full pathname to the license file, or
- # port@host if you store your license keys on a dedicated license server.
- # You will likely want to set this variable in a module file so that it
- # gets loaded every time someone tries to use pgi.
- #
- # For further information on how to acquire a license, please refer to:
- #
- # http://www.pgroup.com/doc/pgiinstall.pdf
- #
- # You may enter your license below.
+.. code-block:: sh
+
+ # A license is required to use pgi.
+ #
+ # The recommended solution is to store your license key in this global
+ # license file. After installation, the following symlink(s) will be
+ # added to point to this file (relative to the installation prefix):
+ #
+ # license.dat
+ #
+ # Alternatively, use one of the following environment variable(s):
+ #
+ # PGROUPD_LICENSE_FILE
+ # LM_LICENSE_FILE
+ #
+ # If you choose to store your license in a non-standard location, you may
+ # set one of these variable(s) to the full pathname to the license file, or
+ # port@host if you store your license keys on a dedicated license server.
+ # You will likely want to set this variable in a module file so that it
+ # gets loaded every time someone tries to use pgi.
+ #
+ # For further information on how to acquire a license, please refer to:
+ #
+ # http://www.pgroup.com/doc/pgiinstall.pdf
+ #
+ # You may enter your license below.
You can add your license directly to this file, or tell FlexNet to use a
license stored on a separate license server. Here is an example that
points to a license server called licman1:
-.. code-block::
+.. code-block:: none
- SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
- USE_SERVER
+ SERVER licman1.mcs.anl.gov 00163eb7fba5 27200
+ USE_SERVER
If your package requires the license to install, you can reference the
location of this global license using ``self.global_license_file``.
@@ -826,8 +1063,9 @@ documentation.
.. _patching:
+-------
Patches
-------------------------------------------
+-------
Depending on the host architecture, package version, known bugs, or
other issues, you may need to patch your software to get it to build
@@ -835,11 +1073,12 @@ correctly. Like many other package systems, spack allows you to store
patches alongside your package files and apply them to source code
after it's downloaded.
+^^^^^^^^^
``patch``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^
You can specify patches in your package file with the ``patch()``
-function. ``patch`` looks like this:
+directive. ``patch`` looks like this:
.. code-block:: python
@@ -851,7 +1090,9 @@ The first argument can be either a URL or a filename. It specifies a
patch file that should be applied to your source. If the patch you
supply is a filename, then the patch needs to live within the spack
source tree. For example, the patch above lives in a directory
-structure like this::
+structure like this:
+
+.. code-block:: none
$SPACK_ROOT/var/spack/repos/builtin/packages/
mvapich2/
@@ -872,52 +1113,59 @@ from the URL and then applied to your source code.
``patch`` can take two optional keyword arguments. They are:
+""""""""
``when``
- If supplied, this is a spec that tells spack when to apply
- the patch. If the installed package spec matches this spec, the
- patch will be applied. In our example above, the patch is applied
- when mvapich is at version ``1.9`` or higher.
+""""""""
+
+If supplied, this is a spec that tells spack when to apply
+the patch. If the installed package spec matches this spec, the
+patch will be applied. In our example above, the patch is applied
+when mvapich is at version ``1.9`` or higher.
+"""""""""
``level``
- This tells spack how to run the ``patch`` command. By default,
- the level is 1 and spack runs ``patch -p1``. If level is 2,
- spack will run ``patch -p2``, and so on.
-
- A lot of people are confused by level, so here's a primer. If you
- look in your patch file, you may see something like this:
-
- .. code-block:: diff
- :linenos:
-
- --- a/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 12:05:44.806417000 -0800
- +++ b/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 11:53:03.295622000 -0800
- @@ -8,7 +8,7 @@
- * Copyright (C) 2008 Sun Microsystems, Lustre group
- */
-
- -#define _XOPEN_SOURCE 600
- +//#define _XOPEN_SOURCE 600
- #include <stdlib.h>
- #include <malloc.h>
- #include "ad_lustre.h"
-
- Lines 1-2 show paths with synthetic ``a/`` and ``b/`` prefixes. These
- are placeholders for the two ``mvapich2`` source directories that
- ``diff`` compared when it created the patch file. This is git's
- default behavior when creating patch files, but other programs may
- behave differently.
-
- ``-p1`` strips off the first level of the prefix in both paths,
- allowing the patch to be applied from the root of an expanded mvapich2
- archive. If you set level to ``2``, it would strip off ``src``, and
- so on.
-
- It's generally easier to just structure your patch file so that it
- applies cleanly with ``-p1``, but if you're using a patch you didn't
- create yourself, ``level`` can be handy.
-
-``patch()`` functions
-~~~~~~~~~~~~~~~~~~~~~~~~
+"""""""""
+
+This tells spack how to run the ``patch`` command. By default,
+the level is 1 and spack runs ``patch -p1``. If level is 2,
+spack will run ``patch -p2``, and so on.
+
+A lot of people are confused by level, so here's a primer. If you
+look in your patch file, you may see something like this:
+
+.. code-block:: diff
+ :linenos:
+
+ --- a/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 12:05:44.806417000 -0800
+ +++ b/src/mpi/romio/adio/ad_lustre/ad_lustre_rwcontig.c 2013-12-10 11:53:03.295622000 -0800
+ @@ -8,7 +8,7 @@
+ * Copyright (C) 2008 Sun Microsystems, Lustre group
+     */
+
+ -#define _XOPEN_SOURCE 600
+ +//#define _XOPEN_SOURCE 600
+ #include <stdlib.h>
+ #include <malloc.h>
+ #include "ad_lustre.h"
+
+Lines 1-2 show paths with synthetic ``a/`` and ``b/`` prefixes. These
+are placeholders for the two ``mvapich2`` source directories that
+``diff`` compared when it created the patch file. This is git's
+default behavior when creating patch files, but other programs may
+behave differently.
+
+``-p1`` strips off the first level of the prefix in both paths,
+allowing the patch to be applied from the root of an expanded mvapich2
+archive. If you set level to ``2``, it would strip off ``src``, and
+so on.
+
+It's generally easier to just structure your patch file so that it
+applies cleanly with ``-p1``, but if you're using a patch you didn't
+create yourself, ``level`` can be handy.
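+
+Putting the two keyword arguments together, a hypothetical conditional patch
+declaration (the file name is made up) could look like:
+
+.. code-block:: python
+
+   # Apply foo.patch only at version 1.9 or newer, running
+   # ``patch -p2`` instead of the default ``patch -p1``.
+   patch('foo.patch', when='@1.9:', level=2)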
+
+^^^^^^^^^^^^^^^
+Patch functions
+^^^^^^^^^^^^^^^
In addition to supplying patch files, you can write a custom function
to patch a package's source. For example, the ``py-pyside`` package
@@ -926,35 +1174,10 @@ handles ``RPATH``:
.. _pyside-patch:
-.. code-block:: python
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/py-pyside/package.py
+ :pyobject: PyPyside.patch
:linenos:
- class PyPyside(Package):
- ...
-
- def patch(self):
- """Undo PySide RPATH handling and add Spack RPATH."""
- # Figure out the special RPATH
- pypkg = self.spec['python'].package
- rpath = self.rpath
- rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide'))
-
- # Add Spack's standard CMake args to the sub-builds.
- # They're called BY setup.py so we have to patch it.
- filter_file(
- r'OPTION_CMAKE,',
- r'OPTION_CMAKE, ' + (
- '"-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE", '
- '"-DCMAKE_INSTALL_RPATH=%s",' % ':'.join(rpath)),
- 'setup.py')
-
- # PySide tries to patch ELF files to remove RPATHs
- # Disable this and go with the one we set.
- filter_file(
- r'^\s*rpath_cmd\(pyside_path, srcpath\)',
- r'#rpath_cmd(pyside_path, srcpath)',
- 'pyside_postinstall.py')
-
A ``patch`` function, if present, will be run after patch files are
applied and before ``install()`` is run.
@@ -963,56 +1186,57 @@ function gives you some benefits. First, spack ensures that the
``patch()`` function is run once per code checkout. That means that
if you run install, hit ctrl-C, and run install again, the code in the
patch function is only run once. Also, you can tell Spack to run only
-the patching part of the build using the :ref:`spack-patch` command.
+the patching part of the build using the :ref:`cmd-spack-patch` command.
+---------------
Handling RPATHs
-----------------------------
+---------------
Spack installs each package in a way that ensures that all of its
dependencies are found when it runs. It does this using `RPATHs
<http://en.wikipedia.org/wiki/Rpath>`_. An RPATH is a search
path, stored in a binary (an executable or library), that tells the
dynamic loader where to find its dependencies at runtime. You may be
-familiar with ```LD_LIBRARY_PATH``
+familiar with `LD_LIBRARY_PATH
<http://tldp.org/HOWTO/Program-Library-HOWTO/shared-libraries.html>`_
-on Linux or ```DYLD_LIBRARY_PATH``
-<https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/dyld.1.html>`
+on Linux or `DYLD_LIBRARY_PATH
+<https://developer.apple.com/library/mac/documentation/Darwin/Reference/ManPages/man1/dyld.1.html>`_
on Mac OS X. RPATH is similar to these paths, in that it tells
the loader where to find libraries. Unlike them, it is embedded in
the binary and not set in each user's environment.
RPATHs in Spack are handled in one of three ways:
- 1. For most packages, RPATHs are handled automatically using Spack's
- :ref:`compiler wrappers <compiler-wrappers>`. These wrappers are
- set in standard variables like ``CC``, ``CXX``, and ``FC``, so
- most build systems (autotools and many gmake systems) pick them
- up and use them.
- 2. CMake also respects Spack's compiler wrappers, but many CMake
- builds have logic to overwrite RPATHs when binaries are
- installed. Spack provides the ``std_cmake_args`` variable, which
- includes parameters necessary for CMake build use the right
- installation RPATH. It can be used like this when ``cmake`` is
- invoked:
-
- .. code-block:: python
-
- class MyPackage(Package):
- ...
- def install(self, spec, prefix):
- cmake('..', *std_cmake_args)
- make()
- make('install')
-
- 3. If you need to modify the build to add your own RPATHs, you can
- use the ``self.rpath`` property of your package, which will
- return a list of all the RPATHs that Spack will use when it
- links. You can see this how this is used in the :ref:`PySide
- example <pyside-patch>` above.
-
-
+#. For most packages, RPATHs are handled automatically using Spack's
+ :ref:`compiler wrappers <compiler-wrappers>`. These wrappers are
+ set in standard variables like ``CC``, ``CXX``, ``F77``, and ``FC``,
+ so most build systems (autotools and many gmake systems) pick them
+ up and use them.
+#. CMake also respects Spack's compiler wrappers, but many CMake
+ builds have logic to overwrite RPATHs when binaries are
+ installed. Spack provides the ``std_cmake_args`` variable, which
+   includes the parameters necessary for CMake builds to use the right
+ installation RPATH. It can be used like this when ``cmake`` is
+ invoked:
+
+ .. code-block:: python
+
+ class MyPackage(Package):
+ ...
+ def install(self, spec, prefix):
+ cmake('..', *std_cmake_args)
+ make()
+ make('install')
+
+#. If you need to modify the build to add your own RPATHs, you can
+ use the ``self.rpath`` property of your package, which will
+ return a list of all the RPATHs that Spack will use when it
+   links. You can see how this is used in the :ref:`PySide
+ example <pyside-patch>` above.
+
+--------------------
Finding new versions
-----------------------------
+--------------------
You've already seen the ``homepage`` and ``url`` package attributes:
@@ -1021,6 +1245,7 @@ You've already seen the ``homepage`` and ``url`` package attributes:
from spack import *
+
class Mpich(Package):
"""MPICH is a high performance and widely portable implementation of
the Message Passing Interface (MPI) standard."""
@@ -1032,17 +1257,18 @@ information about the package, and to determine where to download its
source code.
Spack uses the tarball URL to extrapolate where to find other tarballs
-of the same package (e.g. in `spack checksum <spack-checksum_>`_, but
+of the same package (e.g. in :ref:`cmd-spack-checksum`, but
this does not always work. This section covers ways you can tell
Spack to find tarballs elsewhere.
.. _attribute_list_url:
+^^^^^^^^^^^^
``list_url``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^
When spack tries to find available versions of packages (e.g. with
-`spack checksum <spack-checksum_>`_), it spiders the parent directory
+:ref:`cmd-spack-checksum`), it spiders the parent directory
of the tarball in the ``url`` attribute. For example, for libelf, the
url is:
@@ -1073,14 +1299,17 @@ the ``list_url``, because that is where links to old versions are:
.. _attribute_list_depth:
+^^^^^^^^^^^^^^
``list_depth``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
``libdwarf`` and many other packages have a listing of available
versions on a single webpage, but not all do. For example, ``mpich``
has a tarball URL that looks like this:
- url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
+.. code-block:: python
+
+ url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
But its downloads are in many different subdirectories of
``http://www.mpich.org/static/downloads/``. So, we need to add a
@@ -1104,8 +1333,9 @@ when spidering the page.
.. _attribute_parallel:
+---------------
Parallel builds
-------------------
+---------------
By default, Spack will invoke ``make()`` with a ``-j <njobs>``
argument, so that builds run in parallel. It figures out how many
@@ -1157,11 +1387,11 @@ you set ``parallel`` to ``False`` at the package level, then each call
to ``make()`` will be sequential by default, but packagers can call
``make(parallel=True)`` to override it.
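+
+For reference, here is a minimal sketch of a package that must build
+serially, but re-enables parallelism for one specific ``make`` call:
+
+.. code-block:: python
+
+   class Foo(Package):
+       ...
+       # Every make() call in this package runs without -j by default.
+       parallel = False
+
+       def install(self, spec, prefix):
+           configure('--prefix=' + prefix)
+           make(parallel=True)   # explicitly parallel for the build step
+           make('install')       # serial, as set at the package level
+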
-
.. _dependencies:
+------------
Dependencies
-------------------------------
+------------
We've covered how to build a simple package, but what if one package
relies on another package to build? How do you express that in a
@@ -1188,8 +1418,9 @@ Spack makes this relatively easy. Let's take a look at the
def install(self, spec, prefix):
...
+^^^^^^^^^^^^^^^^
``depends_on()``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^
The highlighted ``depends_on('libelf')`` call tells Spack that it
needs to build and install the ``libelf`` package before it builds
@@ -1197,8 +1428,9 @@ needs to build and install the ``libelf`` package before it builds
guaranteed that ``libelf`` has been built and installed successfully,
so you can rely on it for your libdwarf build.
+^^^^^^^^^^^^^^^^
Dependency specs
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^
``depends_on`` doesn't just take the name of another package. It
takes a full spec. This means that you can restrict the versions or
@@ -1221,11 +1453,13 @@ just as easily provide a version range:
depends_on("libelf@0.8.2:0.8.4:")
-Or a requirement for a particular variant:
+Or a requirement for a particular variant or compiler flags:
.. code-block:: python
depends_on("libelf@0.8+debug")
+ depends_on('libelf debug=True')
+ depends_on('libelf cppflags="-fPIC"')
Both users *and* package authors can use the same spec syntax to refer
to different package configurations. Users use the spec syntax on the
@@ -1233,10 +1467,46 @@ command line to find installed packages or to install packages with
particular constraints, and package authors can use specs to describe
relationships between packages.
+Additionally, dependencies may be specified for specific use cases:
+
+.. code-block:: python
+
+ depends_on("cmake", type="build")
+ depends_on("libelf", type=("build", "link"))
+ depends_on("python", type="run")
+
+The dependency types are:
+
+ * **"build"**: made available during the project's build. The package will
+ be added to ``PATH``, the compiler include paths, and ``PYTHONPATH``.
+ Other projects which depend on this one will not have these modified
+ (building project X doesn't need project Y's build dependencies).
+ * **"link"**: the project is linked to by the project. The package will be
+ added to the current package's ``rpath``.
+ * **"run"**: the project is used by the project at runtime. The package will
+ be added to ``PATH`` and ``PYTHONPATH``.
+
+If ``type`` is not specified, it defaults to ``("build", "link")``, which
+is the common case for compiled languages.
+
+"""""""""""""""""""
+Dependency Formulas
+"""""""""""""""""""
+
+This section shows how to write appropriate ``depends_on()``
+declarations for some common cases.
+
+* Python 2 only: ``depends_on('python@:2.8')``
+* Python 2.7 only: ``depends_on('python@2.7:2.8')``
+* Python 3 only: ``depends_on('python@3:')``
+
.. _setup-dependent-environment:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``setup_dependent_environment()``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack provides a mechanism for dependencies to provide variables that
can be used in their dependents' build. Any package can declare a
@@ -1260,34 +1530,16 @@ packages that depend on a particular Qt installation will find it.
The arguments to this function are:
- * **module**: the module of the dependent package, where global
- properties can be assigned.
- * **spec**: the spec of the *dependency package* (the one the function is called on).
- * **dep_spec**: the spec of the dependent package (i.e. dep_spec depends on spec).
+* **module**: the module of the dependent package, where global
+ properties can be assigned.
+* **spec**: the spec of the *dependency package* (the one the function is called on).
+* **dep_spec**: the spec of the dependent package (i.e. dep_spec depends on spec).
A good example of using these is in the Python package:
-.. code-block:: python
-
- def setup_dependent_environment(self, module, spec, dep_spec):
- # Python extension builds can have a global python executable function
- module.python = Executable(join_path(spec.prefix.bin, 'python'))
-
- # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
- module.python_lib_dir = os.path.join(dep_spec.prefix, self.python_lib_dir)
- module.python_include_dir = os.path.join(dep_spec.prefix, self.python_include_dir)
- module.site_packages_dir = os.path.join(dep_spec.prefix, self.site_packages_dir)
-
- # Make the site packages directory if it does not exist already.
- mkdirp(module.site_packages_dir)
-
- # Set PYTHONPATH to include site-packages dir for the
- # extension and any other python extensions it depends on.
- python_paths = []
- for d in dep_spec.traverse():
- if d.package.extends(self.spec):
- python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
- os.environ['PYTHONPATH'] = ':'.join(python_paths)
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/python/package.py
+ :pyobject: Python.setup_dependent_environment
+ :linenos:
The first thing that happens here is that the ``python`` command is
inserted into module scope of the dependent. This allows most python
@@ -1296,17 +1548,17 @@ packages to have a very simple install method, like this:
.. code-block:: python
def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ python('setup.py', 'install', '--prefix={0}'.format(prefix))
Python's ``setup_dependent_environment`` method also sets up some
other variables, creates a directory, and sets up the ``PYTHONPATH``
so that dependent packages can find their dependencies at build time.
-
.. _packaging_extensions:
+----------
Extensions
--------------------------
+----------
Spack's support for package extensions is documented extensively in
:ref:`extensions`. This section documents how to make your own
@@ -1337,6 +1589,30 @@ Now, the ``py-numpy`` package can be used as an argument to ``spack
activate``. When it is activated, all the files in its prefix will be
symbolically linked into the prefix of the python package.
+Some packages produce a Python extension, but are only compatible with
+Python 3, or with Python 2. In those cases, a ``depends_on()``
+declaration should be made in addition to the ``extends()``
+declaration:
+
+.. code-block:: python
+
+   class Icebin(Package):
+       extends('python', when='+python')
+       depends_on('python@3:', when='+python')
+
+Many packages produce Python extensions for *some* variants, but not
+others: they should extend ``python`` only if the appropriate
+variant(s) are selected. This may be accomplished with conditional
+``extends()`` declarations:
+
+.. code-block:: python
+
+   class FooLib(Package):
+       variant('python', default=True,
+               description='Build the Python extension module')
+       extends('python', when='+python')
+       ...
+
Sometimes, certain files in one package will conflict with those in
another, which means they cannot both be activated (symlinked) at the
same time. In this case, you can tell Spack to ignore those files
@@ -1344,13 +1620,15 @@ when it does the activation:
.. code-block:: python
- class PyNose(Package):
+ class PySncosmo(Package):
...
- extends('python', ignore=r'bin/nosetests.*$')
+ # py-sncosmo binaries are duplicates of those from py-astropy
+ extends('python', ignore=r'bin/.*')
+ depends_on('py-astropy')
...
-The code above will prevent ``$prefix/bin/nosetests`` from being
-linked in at activation time.
+The code above will prevent everything in the ``$prefix/bin/`` directory
+from being linked in at activation time.
.. note::
@@ -1359,10 +1637,9 @@ linked in at activation time.
``depends_on('python')`` and ``extends('python')`` in the same
package. ``extends`` implies ``depends_on``.
-
-
+^^^^^^^^^^^^^^^^^^^^^^^^^
Activation & deactivation
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^
Spack's ``Package`` class has default ``activate`` and ``deactivate``
implementations that handle symbolically linking extensions' prefixes
@@ -1380,15 +1657,9 @@ same way that Python does.
Let's look at Python's activate function:
-.. code-block:: python
-
- def activate(self, ext_pkg, **kwargs):
- kwargs.update(ignore=self.python_ignore(ext_pkg, kwargs))
- super(Python, self).activate(ext_pkg, **kwargs)
-
- exts = spack.install_layout.extension_map(self.spec)
- exts[ext_pkg.name] = ext_pkg.spec
- self.write_easy_install_pth(exts)
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/python/package.py
+ :pyobject: Python.activate
+ :linenos:
This function is called on the *extendee* (Python). It first calls
``activate`` in the superclass, which handles symlinking the
@@ -1398,23 +1669,16 @@ Python's setuptools.
Deactivate behaves similarly to activate, but it unlinks files:
-.. code-block:: python
-
- def deactivate(self, ext_pkg, **kwargs):
- kwargs.update(ignore=self.python_ignore(ext_pkg, kwargs))
- super(Python, self).deactivate(ext_pkg, **kwargs)
-
- exts = spack.install_layout.extension_map(self.spec)
- if ext_pkg.name in exts: # Make deactivate idempotent.
- del exts[ext_pkg.name]
- self.write_easy_install_pth(exts)
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/python/package.py
+ :pyobject: Python.deactivate
+ :linenos:
Both of these methods call some custom functions in the Python
package. See the source for Spack's Python package for details.
-
+^^^^^^^^^^^^^^^^^^^^
Activation arguments
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^
You may have noticed that the ``activate`` function defined above
takes keyword arguments. These are the keyword arguments from
@@ -1429,11 +1693,11 @@ The only keyword argument supported by default is the ``ignore``
argument, which can take a regex, list of regexes, or a predicate to
determine which files *not* to symlink during activation.
-
.. _virtual-dependencies:
+--------------------
Virtual dependencies
------------------------------
+--------------------
In some cases, more than one package can satisfy another package's
dependency. One way this can happen is if a package depends on a
@@ -1454,8 +1718,9 @@ similar package files, e.g., ``foo``, ``foo-mvapich``, ``foo-mpich``,
but Spack avoids this explosion of package files by providing support
for *virtual dependencies*.
+^^^^^^^^^^^^
``provides``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^
In Spack, ``mpi`` is handled as a *virtual package*. A package like
``mpileaks`` can depend on it just like any other package, by
@@ -1491,8 +1756,9 @@ The ``provides("mpi")`` call tells Spack that the ``mpich`` package
can be used to satisfy the dependency of any package that
``depends_on('mpi')``.
+^^^^^^^^^^^^^^^^^^^^
Versioned Interfaces
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^
Just as you can pass a spec to ``depends_on``, so can you pass a spec
to ``provides`` to add constraints. This allows Spack to support the
@@ -1513,8 +1779,9 @@ This says that ``mpich2`` provides MPI support *up to* version 2, but
if a package ``depends_on("mpi@3")``, then Spack will *not* build that
package with ``mpich2``.
+^^^^^^^^^^^^^^^^^
``provides when``
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^
The same package may provide different versions of an interface
depending on *its* version. Above, we simplified the ``provides``
@@ -1548,27 +1815,27 @@ the package ``foo`` declares this:
Suppose a user invokes ``spack install`` like this:
-.. code-block:: sh
+.. code-block:: console
$ spack install foo ^mpich@1.0
Spack will fail with a constraint violation, because the version of
MPICH requested is too low for the ``mpi`` requirement in ``foo``.
-
.. _abstract-and-concrete:
+-------------------------
Abstract & concrete specs
-------------------------------------------
+-------------------------
Now that we've seen how spec constraints can be specified :ref:`on the
command line <sec-specs>` and within package definitions, we can talk
about how Spack puts all of this information together. When you run
this:
-.. code-block:: sh
+.. code-block:: console
- spack install mpileaks ^callpath@1.0+debug ^libelf@0.8.11
+ $ spack install mpileaks ^callpath@1.0+debug ^libelf@0.8.11
Spack parses the command line and builds a spec from the description.
The spec says that ``mpileaks`` should be built with the ``callpath``
@@ -1584,7 +1851,9 @@ abstract spec is partially specified. In other words, it could
describe more than one build of a package. Spack does this to make
things easier on the user: they should only have to specify as much of
the package spec as they care about. Here's an example partial spec
-DAG, based on the constraints above::
+DAG, based on the constraints above:
+
+.. code-block:: none
mpileaks
^callpath@1.0+debug
@@ -1593,7 +1862,6 @@ DAG, based on the constraints above::
^libelf@0.8.11
^mpi
-
.. graphviz::
digraph {
@@ -1604,7 +1872,6 @@ DAG, based on the constraints above::
dyninst -> "libelf@0.8.11"
}
-
This diagram shows a spec DAG output as a tree, where successive
levels of indentation represent a depends-on relationship. In the
above DAG, we can see some packages annotated with their constraints,
@@ -1612,8 +1879,9 @@ and some packages with no annotations at all. When there are no
annotations, it means the user doesn't care what configuration of that
package is built, just so long as it works.
+^^^^^^^^^^^^^^
Concretization
-~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
An abstract spec is useful for the user, but you can't install an
abstract spec. Spack has to take the abstract spec and "fill in" the
@@ -1621,23 +1889,25 @@ remaining unspecified parts in order to install. This process is
called **concretization**. Concretization happens in between the time
the user runs ``spack install`` and the time the ``install()`` method
is called. The concretized version of the spec above might look like
-this::
+this:
+
+.. code-block:: none
- mpileaks@2.3%gcc@4.7.3=linux-ppc64
- ^callpath@1.0%gcc@4.7.3+debug=linux-ppc64
- ^dyninst@8.1.2%gcc@4.7.3=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3=linux-ppc64
- ^libelf@0.8.11%gcc@4.7.3=linux-ppc64
- ^mpich@3.0.4%gcc@4.7.3=linux-ppc64
+ mpileaks@2.3%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^callpath@1.0%gcc@4.7.3+debug arch=linux-debian7-x86_64
+ ^dyninst@8.1.2%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libelf@0.8.11%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^mpich@3.0.4%gcc@4.7.3 arch=linux-debian7-x86_64
.. graphviz::
digraph {
- "mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
- "mpileaks@2.3\n%gcc@4.7.3\n=linux-ppc64" -> "callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "mpich@3.0.4\n%gcc@4.7.3\n=linux-ppc64"
- "callpath@1.0\n%gcc@4.7.3+debug\n=linux-ppc64" -> "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libdwarf@20130729\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
- "dyninst@8.1.2\n%gcc@4.7.3\n=linux-ppc64" -> "libelf@0.8.11\n%gcc@4.7.3\n=linux-ppc64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "mpileaks@2.3\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "mpich@3.0.4\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "callpath@1.0\n%gcc@4.7.3+debug\n arch=linux-debian7-x86_64" -> "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libdwarf@20130729\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
+ "dyninst@8.1.2\n%gcc@4.7.3\n arch=linux-debian7-x86_64" -> "libelf@0.8.11\n%gcc@4.7.3\n arch=linux-debian7-x86_64"
}
Here, all versions, compilers, and platforms are filled in, and there
@@ -1648,121 +1918,109 @@ point will Spack call the ``install()`` method for your package.
Concretization in Spack is based on certain selection policies that
tell Spack how to select, e.g., a version, when one is not specified
explicitly. Concretization policies are discussed in more detail in
-:ref:`site-configuration`. Sites using Spack can customize them to
-match the preferences of their own users.
+:ref:`configuration`. Sites using Spack can customize them to match
+the preferences of their own users.
-.. _spack-spec:
+.. _cmd-spack-spec:
+^^^^^^^^^^^^^^
``spack spec``
-~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
For an arbitrary spec, you can see the result of concretization by
running ``spack spec``. For example:
-.. code-block:: sh
+.. code-block:: console
$ spack spec dyninst@8.0.1
dyninst@8.0.1
^libdwarf
^libelf
- dyninst@8.0.1%gcc@4.7.3=linux-ppc64
- ^libdwarf@20130729%gcc@4.7.3=linux-ppc64
- ^libelf@0.8.13%gcc@4.7.3=linux-ppc64
+ dyninst@8.0.1%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libdwarf@20130729%gcc@4.7.3 arch=linux-debian7-x86_64
+ ^libelf@0.8.13%gcc@4.7.3 arch=linux-debian7-x86_64
This is useful when you want to know exactly what Spack will do when
you ask for a particular spec.
+.. _concretization-policies:
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
``Concretization Policies``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
A user may have certain preferences for how packages should
be concretized on their system. For example, one user may prefer packages
built with OpenMPI and the Intel compiler. Another user may prefer
packages be built with MVAPICH and GCC.
-Spack can be configured to prefer certain compilers, package
-versions, depends_on, and variants during concretization.
-The preferred configuration can be controlled via the
-``~/.spack/packages.yaml`` file for user configuations, or the
-``etc/spack/packages.yaml`` site configuration.
+See the :ref:`concretization-preferences` section for more details.
+.. _install-method:
-Here's an example packages.yaml file that sets preferred packages:
+------------------
+Inconsistent Specs
+------------------
-.. code-block:: sh
+Suppose a user needs to install package C, which depends on packages A
+and B. Package A builds a library with a Python2 extension, and
+package B builds a library with a Python3 extension. Packages A and B
+cannot be loaded together in the same Python runtime:
- packages:
- dyninst:
- compiler: [gcc@4.9]
- variants: +debug
- gperftools:
- version: [2.2, 2.4, 2.3]
- all:
- compiler: [gcc@4.4.7, gcc@4.6:, intel, clang, pgi]
- providers:
- mpi: [mvapich, mpich, openmpi]
-
-
-At a high level, this example is specifying how packages should be
-concretized. The dyninst package should prefer using gcc 4.9 and
-be built with debug options. The gperftools package should prefer version
-2.2 over 2.4. Every package on the system should prefer mvapich for
-its MPI and gcc 4.4.7 (except for Dyninst, which overrides this by preferring gcc 4.9).
-These options are used to fill in implicit defaults. Any of them can be overwritten
-on the command line if explicitly requested.
-
-Each packages.yaml file begins with the string ``packages:`` and
-package names are specified on the next level. The special string ``all``
-applies settings to each package. Underneath each package name is
-one or more components: ``compiler``, ``variants``, ``version``,
-or ``providers``. Each component has an ordered list of spec
-``constraints``, with earlier entries in the list being preferred over
-later entries.
-
-Sometimes a package installation may have constraints that forbid
-the first concretization rule, in which case Spack will use the first
-legal concretization rule. Going back to the example, if a user
-requests gperftools 2.3 or later, then Spack will install version 2.4
-as the 2.4 version of gperftools is preferred over 2.3.
-
-An explicit concretization rule in the preferred section will always
-take preference over unlisted concretizations. In the above example,
-xlc isn't listed in the compiler list. Every listed compiler from
-gcc to pgi will thus be preferred over the xlc compiler.
-
-The syntax for the ``provider`` section differs slightly from other
-concretization rules. A provider lists a value that packages may
-``depend_on`` (e.g, mpi) and a list of rules for fulfilling that
-dependency.
+.. code-block:: python
-.. _install-method:
+   class A(Package):
+       variant('python', default=True, description='enable python bindings')
+       depends_on('python@2.7', when='+python')
+
+       def install(self, spec, prefix):
+           # do whatever is necessary to enable/disable python
+           # bindings according to variant
+           pass
+
+   class B(Package):
+       variant('python', default=True, description='enable python bindings')
+       depends_on('python@3.2:', when='+python')
+
+       def install(self, spec, prefix):
+           # do whatever is necessary to enable/disable python
+           # bindings according to variant
+           pass
+
+Package C needs to use the libraries from packages A and B, but does
+not need either of the Python extensions. In this case, package C
+should simply depend on the ``~python`` variant of A and B:
+
+.. code-block:: python
+
+   class C(Package):
+       depends_on('A~python')
+       depends_on('B~python')
+
+This may require that A or B be built twice, if the user wishes to use
+the Python extensions provided by them: once for ``+python`` and once
+for ``~python``. Other than using a little extra disk space, that
+solution has no serious problems.
+
+-----------------------------------
Implementing the ``install`` method
-------------------------------------------
+-----------------------------------
The last element of a package is its ``install()`` method. This is
where the real work of installation happens, and it's the main part of
the package you'll need to customize for each piece of software.
-.. literalinclude:: ../../../var/spack/repos/builtin/packages/libelf/package.py
- :start-after: 0.8.12
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/mpfr/package.py
+ :pyobject: Mpfr.install
:linenos:
``install`` takes a ``spec``: a description of how the package should
be built, and a ``prefix``: the path to the directory where the
software should be installed.
-
Spack provides wrapper functions for ``configure`` and ``make`` so
that you can call them in a similar way to how you'd call a shell
command. In reality, these are Python functions. Spack provides
these functions to make writing packages more natural. See the section
on :ref:`shell wrappers <shell-wrappers>`.
-
-
Now that the metadata is out of the way, we can move on to the
``install()`` method. When a user runs ``spack install``, Spack
fetches an archive for the correct version of the software, expands
@@ -1812,8 +2070,9 @@ information.
.. _install-environment:
+-----------------------
The install environment
---------------------------
+-----------------------
In general, you should not have to do much differently in your install
method than you would when installing a package on the command line.
@@ -1830,14 +2089,15 @@ custom Makefiles, you may need to add logic to modify the makefiles.
The remainder of the section covers the way Spack's build environment
works.
+^^^^^^^^^^^^^^^^^^^^^
Environment variables
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^
Spack sets a number of standard environment variables that serve two
purposes:
- #. Make build systems use Spack's compiler wrappers for their builds.
- #. Allow build systems to find dependencies more easily
+#. Make build systems use Spack's compiler wrappers for their builds.
+#. Allow build systems to find dependencies more easily
The Compiler environment variables that Spack sets are:
@@ -1851,7 +2111,7 @@ The Compiler environment variables that Spack sets are:
============ ===============================
All of these are standard variables respected by most build systems.
-If your project uses ``autotools`` or ``CMake``, then it should pick
+If your project uses ``Autotools`` or ``CMake``, then it should pick
them up automatically when you run ``configure`` or ``cmake`` in the
``install()`` function. Many traditional builds using GNU Make and
BSD make also respect these variables, so they may work with these
@@ -1868,12 +2128,12 @@ In addition to the compiler variables, these variables are set before
entering ``install()`` so that packages can locate dependencies
easily:
- ======================= =============================
- ``PATH`` Set to point to ``/bin`` directories of dependencies
- ``CMAKE_PREFIX_PATH`` Path to dependency prefixes for CMake
- ``PKG_CONFIG_PATH`` Path to any pkgconfig directories for dependencies
- ``PYTHONPATH`` Path to site-packages dir of any python dependencies
- ======================= =============================
+===================== ====================================================
+``PATH``              Set to point to ``/bin`` directories of dependencies
+``CMAKE_PREFIX_PATH`` Path to dependency prefixes for CMake
+``PKG_CONFIG_PATH``   Path to any pkgconfig directories for dependencies
+``PYTHONPATH``        Path to site-packages dir of any python dependencies
+===================== ====================================================
``PATH`` is set up to point to the ``/bin`` directories of dependencies so
that you can use tools installed by dependency packages at build time.
@@ -1884,7 +2144,7 @@ For example, ``$MPICH_ROOT/bin/mpicc`` is frequently used by dependencies of
where ``cmake`` will search for dependency libraries and headers.
This causes all standard CMake find commands to look in the paths of
your dependencies, so you *do not* have to manually specify arguments
-like ``-D DEPENDENCY_DIR=/path/to/dependency`` to ``cmake``. More on
+like ``-DDEPENDENCY_DIR=/path/to/dependency`` to ``cmake``. More on
this is `in the CMake documentation <http://www.cmake.org/cmake/help/v3.0/variable/CMAKE_PREFIX_PATH.html>`_.
``PKG_CONFIG_PATH`` is for packages that attempt to discover
@@ -1894,13 +2154,14 @@ discover its dependencies.
If you want to see the environment that a package will build with, or
if you want to run commands in that environment to test them out, you
-can use the :ref:```spack env`` <spack-env>` command, documented
+can use the :ref:`cmd-spack-env` command, documented
below.
.. _compiler-wrappers:
+^^^^^^^^^^^^^^^^^^^^^
Compiler interceptors
-~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^
As mentioned, ``CC``, ``CXX``, ``F77``, and ``FC`` are set to point to
Spack's compiler wrappers. These are simply called ``cc``, ``c++``,
@@ -1921,13 +2182,15 @@ flags to the compile line so that dependencies can be easily found.
These flags are added for each dependency, if they exist:
Compile-time library search paths
- * ``-L$dep_prefix/lib``
- * ``-L$dep_prefix/lib64``
+* ``-L$dep_prefix/lib``
+* ``-L$dep_prefix/lib64``
+
Runtime library search paths (RPATHs)
- * ``$rpath_flag$dep_prefix/lib``
- * ``$rpath_flag$dep_prefix/lib64``
+* ``$rpath_flag$dep_prefix/lib``
+* ``$rpath_flag$dep_prefix/lib64``
+
Include search paths
- * ``-I$dep_prefix/include``
+* ``-I$dep_prefix/include``
An example of this would be the ``libdwarf`` build, which has one
dependency: ``libelf``. Every call to ``cc`` in the ``libdwarf``
@@ -1960,12 +2223,20 @@ the command line.
``$rpath_flag`` can be overridden on a compiler-specific basis in
``lib/spack/spack/compilers/$compiler.py``.
+The compiler wrappers also pass the compiler flags specified by the user from
+the command line (``cflags``, ``cxxflags``, ``fflags``, ``cppflags``, ``ldflags``,
+and/or ``ldlibs``). They do not override the canonical autotools flags with the
+same names (but in ALL-CAPS) that may be passed into the build by particularly
+challenging package scripts.
+
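+For example, a flag supplied as part of the spec on the command line is
+injected into every compile line by the wrappers (an illustrative invocation):
+
+.. code-block:: console
+
+   $ spack install libelf cppflags="-g -O2"
+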
+^^^^^^^^^^^^^^
Compiler flags
-~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
+
In rare circumstances such as compiling and running small unit tests, a package
developer may need to know what the appropriate compiler flags are to enable
features like ``OpenMP``, ``c++11``, ``c++14``, and the like. To that end the
-compiler classes in ``spack`` implement the following _properties_ :
+compiler classes in ``spack`` implement the following **properties**:
``openmp_flag``, ``cxx11_flag``, ``cxx14_flag``, which can be accessed in a
package by ``self.compiler.cxx11_flag`` and the like. Note that the implementation
is such that if a given compiler version does not support this feature, an
@@ -1977,12 +2248,14 @@ package supports additional variants like
variant('openmp', default=True, description="Enable OpenMP support.")
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Message Parsing Interface (MPI)
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
It is common for high performance computing software/packages to use ``MPI``.
As a result of concretization, a given package can be built using different
implementations of MPI such as ``OpenMPI``, ``MPICH`` or ``IntelMPI``.
-In some scenarios to configure a package one have to provide it with appropriate MPI
+In some scenarios, to configure a package, one has to provide it with appropriate MPI
compiler wrappers such as ``mpicc``, ``mpic++``.
However, different implementations of ``MPI`` may have different names for those
wrappers. In order to make a package's ``install()`` method indifferent to the
choice of ``MPI`` implementation, each package which provides ``mpi`` sets up
``self.spec.mpicc``, ``self.spec.mpicxx``, ``self.spec.mpifc`` and
``self.spec.mpif77`` to point to the corresponding ``MPI`` wrappers.
Package developers are advised to use these variables, for example ``self.spec['mpi'].mpicc``
instead of hard-coding ``join_path(self.spec['mpi'].prefix.bin, 'mpicc')`` for
the reasons outlined above.
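+
+A short sketch of how a package might use these variables when calling
+``configure`` (the configure options shown are illustrative):
+
+.. code-block:: python
+
+   def install(self, spec, prefix):
+       configure('--prefix={0}'.format(prefix),
+                 'CC={0}'.format(spec['mpi'].mpicc),
+                 'CXX={0}'.format(spec['mpi'].mpicxx))
+       make()
+       make('install')
+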
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Blas and Lapack libraries
+^^^^^^^^^^^^^^^^^^^^^^^^^
+Different packages provide implementations of ``Blas`` and ``Lapack`` routines.
+The names of the resulting static and/or shared libraries differ from package
+to package. In order to make the ``install()`` method independent of the
+choice of ``Blas`` implementation, each package which provides it
+sets up ``self.spec.blas_libs`` to point to the correct ``Blas`` libraries.
+The same applies to packages which provide ``Lapack``. Package developers are advised to
+use these variables, for example ``spec['blas'].blas_libs.joined()`` instead of
+hard-coding ``join_path(spec['blas'].prefix.lib, 'libopenblas.so')``.
+
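+A short sketch of how a dependent package might consume this (the
+``--with-blas-libs`` option is a hypothetical configure flag):
+
+.. code-block:: python
+
+   def install(self, spec, prefix):
+       blas_libs = spec['blas'].blas_libs.joined()
+       configure('--prefix={0}'.format(prefix),
+                 '--with-blas-libs={0}'.format(blas_libs))
+       make()
+       make('install')
+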
+^^^^^^^^^^^^^^^^^^^^^
Forking ``install()``
-~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^
To give packagers free rein over their install environment, Spack
forks a new process each time it invokes a package's ``install()``
@@ -2007,9 +2293,9 @@ dedicated process.
.. _prefix-objects:
-
+-----------------
Failing the build
-----------------------
+-----------------
Sometimes you don't want a package to successfully install unless some
condition is true. You can explicitly cause the build to fail from
@@ -2020,9 +2306,9 @@ condition is true. You can explicitly cause the build to fail from
if spec.architecture.startswith('darwin'):
raise InstallError('This package does not build on Mac OS X!')
-
+--------------
Prefix objects
-----------------------
+--------------
Spack passes the ``prefix`` parameter to the install method so that
you can pass it to ``configure``, ``cmake``, or some other installer,
@@ -2032,7 +2318,6 @@ e.g.:
configure('--prefix=' + prefix)
-
For the most part, prefix objects behave exactly like strings. For
packages that do not have their own install target, or for those that
implement it poorly (like ``libdwarf``), you may need to manually copy
@@ -2052,7 +2337,6 @@ yourself, e.g.:
mkdirp(prefix.lib)
install('libfoo.a', prefix.lib)
-
Most of the standard UNIX directory names are attributes on the
``prefix`` object. Here is a full list:
@@ -2079,8 +2363,9 @@ Most of the standard UNIX directory names are attributes on the
.. _spec-objects:
+------------
Spec objects
--------------------------
+------------
When ``install`` is called, most parts of the build process are set up
for you. The correct version's tarball has been downloaded and
@@ -2097,8 +2382,9 @@ special parameters to ``configure``, like
need to supply special compiler flags depending on the compiler. All
of this information is available in the spec.
+^^^^^^^^^^^^^^^^^^^^^^^^
Testing spec constraints
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^
You can test whether your spec is configured a certain way by using
the ``satisfies`` method. For example, if you want to check whether
@@ -2107,9 +2393,14 @@ do that, e.g.:
.. code-block:: python
+ configure_args = [
+ '--prefix={0}'.format(prefix)
+ ]
+
if spec.satisfies('@1.2:1.4'):
configure_args.append("CXXFLAGS='-DWITH_FEATURE'")
- configure('--prefix=' + prefix, *configure_args)
+
+ configure(*configure_args)
This works for compilers, too:
@@ -2163,39 +2454,40 @@ the two functions is that ``satisfies()`` tests whether spec
constraints overlap at all, while ``in`` tests whether a spec or any
of its dependencies satisfy the provided spec.
-
+^^^^^^^^^^^^^^^^^^^^^^
Accessing Dependencies
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^
You may need to get at some file or binary that's in the prefix of one
of your dependencies. You can do that by sub-scripting the spec:
.. code-block:: python
- my_mpi = spec['mpich']
+ my_mpi = spec['mpi']
The value in the brackets needs to be some package name, and spec
needs to depend on that package, or the operation will fail. For
example, the above code will fail if the ``spec`` doesn't depend on
-``mpich``. The value returned and assigned to ``my_mpi``, is itself
+``mpi``. The value returned and assigned to ``my_mpi``, is itself
just another ``Spec`` object, so you can do all the same things you
would do with the package's own spec:
.. code-block:: python
- mpicc = new_path(my_mpi.prefix.bin, 'mpicc')
+ mpicc = join_path(my_mpi.prefix.bin, 'mpicc')
.. _multimethods:
+^^^^^^^^^^^^^^^^^^^^^^^^^^
Multimethods and ``@when``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^
Spack allows you to make multiple versions of instance functions in
packages, based on whether the package's spec satisfies particular
criteria.
The ``@when`` annotation lets packages declare multiple versions of
-methods like install() that depend on the package's spec. For
+methods like ``install()`` that depend on the package's spec. For
example:
.. code-block:: python
@@ -2206,16 +2498,17 @@ example:
def install(self, prefix):
# Do default install
- @when('=chaos_5_x86_64_ib')
+ @when('arch=chaos_5_x86_64_ib')
def install(self, prefix):
# This will be executed instead of the default install if
# the package's sys_type() is chaos_5_x86_64_ib.
- @when('=bgqos_0")
+ @when('arch=linux-debian7-x86_64')
def install(self, prefix):
- # This will be executed if the package's sys_type is bgqos_0
+ # This will be executed if the package's sys_type() is
+ # linux-debian7-x86_64.
-In the above code there are three versions of install(), two of which
+In the above code there are three versions of ``install()``, two of which
are specialized for particular platforms. The version that is called
depends on the architecture of the package spec.
@@ -2287,22 +2580,15 @@ method (the one without the ``@when`` decorator) will be called.
.. _shell-wrappers:
+-----------------------
Shell command functions
-----------------------------
+-----------------------
Recall the install method from ``libelf``:
-.. code-block:: python
-
- def install(self, spec, prefix):
- configure("--prefix=" + prefix,
- "--enable-shared",
- "--disable-dependency-tracking",
- "--disable-debug")
- make()
-
- # The mkdir commands in libelf's install can fail in parallel
- make("install", parallel=False)
+.. literalinclude:: ../../../var/spack/repos/builtin/packages/libelf/package.py
+ :pyobject: Libelf.install
+ :linenos:
Normally in Python, you'd have to write something like this in order
to execute shell commands:
@@ -2310,7 +2596,7 @@ to execute shell commands:
.. code-block:: python
import subprocess
- subprocess.check_call('configure', '--prefix=' + prefix)
+ subprocess.check_call(['configure', '--prefix={0}'.format(prefix)])
We've tried to make this a bit easier by providing callable wrapper
objects for some shell commands. By default, ``configure``,
@@ -2330,17 +2616,17 @@ Callable wrappers also allow spack to provide some special features.
For example, in Spack, ``make`` is parallel by default, and Spack
figures out the number of cores on your machine and passes an
appropriate value for ``-j<numjobs>`` when it calls ``make`` (see the
-``parallel`` package attribute under :ref:`metadata <metadata>`). In
+``parallel`` :ref:`package attribute <attribute_parallel>`). In
a package file, you can supply a keyword argument, ``parallel=False``,
to the ``make`` wrapper to disable parallel make. In the ``libelf``
package, this allows us to avoid race conditions in the library's
build system.
-
.. _sanity-checks:
-Sanity checking an intallation
---------------------------------
+-------------------------------
+Sanity checking an installation
+-------------------------------
By default, Spack assumes that a build has failed if nothing is
written to the install prefix, and that it has succeeded if anything
@@ -2352,7 +2638,7 @@ Consider a simple autotools build like this:
.. code-block:: python
def install(self, spec, prefix):
- configure("--prefix=" + prefix)
+ configure("--prefix={0}".format(prefix))
make()
make("install")
@@ -2365,9 +2651,9 @@ like this can falsely report that they were successfully installed if
an error occurs before the install is complete but after files have
been written to the ``prefix``.
-
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
``sanity_check_is_file`` and ``sanity_check_is_dir``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
You can optionally specify *sanity checks* to deal with this problem.
Add properties like this to your package:
@@ -2389,14 +2675,14 @@ Now, after ``install()`` runs, Spack will check whether
``$prefix/include/libelf.h`` exists and is a file, and whether
``$prefix/lib`` exists and is a directory. If the checks fail, then
the build will fail and the install prefix will be removed. If they
-succeed, Spack considers the build succeeful and keeps the prefix in
+succeed, Spack considers the build successful and keeps the prefix in
place.
-
.. _file-manipulation:
+---------------------------
File manipulation functions
-------------------------------
+---------------------------
Many builds are not perfect. If a build lacks an install target, or if
it does not use systems like CMake or autotools, which have standard
@@ -2416,11 +2702,11 @@ running:
from spack import *
This is already part of the boilerplate for packages created with
-``spack create`` or ``spack edit``.
-
+``spack create``.
+^^^^^^^^^^^^^^^^^^^
Filtering functions
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^
:py:func:`filter_file(regex, repl, *filenames, **kwargs) <spack.filter_file>`
Works like ``sed`` but with Python regular expression syntax. Takes
@@ -2478,9 +2764,9 @@ Filtering functions
change_sed_delimiter('@', ';', 'utils/FixMakefile')
change_sed_delimiter('@', ';', 'utils/FixMakefile.sed.default')
-
+^^^^^^^^^^^^^^
File functions
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^
:py:func:`ancestor(dir, n=1) <spack.ancestor>`
Get the n\ :sup:`th` ancestor of the directory ``dir``.
@@ -2497,8 +2783,8 @@ File functions
install('my-header.h', join_path(prefix.include))
-:py:func:`join_path(prefix, *args) <spack.join_path>` Like
- ``os.path.join``, this joins paths using the OS path separator.
+:py:func:`join_path(prefix, *args) <spack.join_path>`
+ Like ``os.path.join``, this joins paths using the OS path separator.
However, this version allows an arbitrary number of arguments, so
you can string together many path components.
@@ -2548,15 +2834,78 @@ File functions
The ``create=True`` keyword argument causes the command to create
the directory if it does not exist.
-
:py:func:`touch(path) <spack.touch>`
Create an empty file at ``path``.
-
.. _package-lifecycle:
+-----------------------
+Coding Style Guidelines
+-----------------------
+
+The following guidelines are provided, in the interests of making
+Spack packages work in a consistent manner:
+
+^^^^^^^^^^^^^
+Variant Names
+^^^^^^^^^^^^^
+
+Spack packages with variants similar to already-existing Spack
+packages should use the same name for their variants. Standard
+variant names are:
+
+   ======= ======== ========================
+   Name    Default  Description
+   ======= ======== ========================
+   shared  True     Build shared libraries
+   static  True     Build static libraries
+   mpi     True     Use MPI
+   python  False    Build Python extension
+   ======= ======== ========================
+
+If specified in this table, the corresponding default should be used
+when declaring a variant.
+
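+For example, a package offering a shared-library option would declare:
+
+.. code-block:: python
+
+   variant('shared', default=True, description='Build shared libraries')
+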
+^^^^^^^^^^^^^
+Version Lists
+^^^^^^^^^^^^^
+
+Spack packages should list supported versions with the newest first.
+
+^^^^^^^^^^^^^^^^
+Special Versions
+^^^^^^^^^^^^^^^^
+
+The following *special* version names may be used when building a package:
+
+"""""""""""
+``@system``
+"""""""""""
+
+Indicates a hook to the OS-installed version of the
+package. This is useful, for example, to tell Spack to use the
+OS-installed version in ``packages.yaml``:
+
+.. code-block:: yaml
+
+   openssl:
+     paths:
+       openssl@system: /usr
+     buildable: False
+
+Certain Spack internals look for the ``@system`` version and do
+appropriate things in that case.
+
+""""""""""
+``@local``
+""""""""""
+
+Indicates the version was built manually from some source
+tree of unknown provenance (see ``spack setup``).
+
+---------------------------
Packaging workflow commands
----------------------------------
+---------------------------
When you are building packages, you will likely not get things
completely right the first time.
@@ -2571,7 +2920,7 @@ of the build.
A typical package workflow might look like this:
-.. code-block:: sh
+.. code-block:: console
$ spack edit mypackage
$ spack install mypackage
@@ -2584,10 +2933,11 @@ A typical package workflow might look like this:
Below are some commands that will allow you some finer-grained
control over the install process.
-.. _spack-fetch:
+.. _cmd-spack-fetch:
+^^^^^^^^^^^^^^^
``spack fetch``
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^
The first step of ``spack install``. Takes a spec and determines the
correct download URL to use for the requested package version, then
@@ -2598,20 +2948,22 @@ directory will be located under ``$SPACK_HOME/var/spack``.
When run after the archive has already been downloaded, ``spack
fetch`` is idempotent and will not download the archive again.
-.. _spack-stage:
+.. _cmd-spack-stage:
+^^^^^^^^^^^^^^^
``spack stage``
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^
The second step in ``spack install`` after ``spack fetch``. Expands
the downloaded archive in its temporary directory, where it will be
built by ``spack install``. Similar to ``fetch``, if the archive has
already been expanded, ``stage`` is idempotent.
-.. _spack-patch:
+.. _cmd-spack-patch:
+^^^^^^^^^^^^^^^
``spack patch``
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^
After staging, Spack applies patches to downloaded packages, if any
have been specified in the package file. This command will run the
@@ -2621,223 +2973,211 @@ this step if they have been. If Spack discovers that patches didn't
apply cleanly on some previous run, then it will restage the entire
package before patching.
-.. _spack-restage:
+.. _cmd-spack-restage:
+^^^^^^^^^^^^^^^^^
``spack restage``
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^
+
Restores the source code to its pristine state, as it was before building.
Does this in one of two ways:
- 1. If the source was fetched as a tarball, deletes the entire build
- directory and re-expands the tarball.
+#. If the source was fetched as a tarball, deletes the entire build
+ directory and re-expands the tarball.
- 2. If the source was checked out from a repository, this deletes the
- build directory and checks it out again.
+#. If the source was checked out from a repository, this deletes the
+ build directory and checks it out again.
-.. _spack-clean:
+.. _cmd-spack-clean:
+^^^^^^^^^^^^^^^
``spack clean``
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^
+
Cleans up temporary files for a particular package, by deleting the
expanded/checked out source code *and* any downloaded archive. If
``fetch``, ``stage``, or ``install`` are run again after this, Spack's
build process will start from scratch.
+.. _cmd-spack-purge:
-.. _spack-purge:
-
+^^^^^^^^^^^^^^^
``spack purge``
-~~~~~~~~~~~~~~~~~
-Cleans up all of Spack's temporary files. Use this to recover disk
-space if temporary files from interrupted or failed installs
-accumulate in the staging area. This is equivalent to running ``spack
-clean`` for every package you have fetched or staged.
+^^^^^^^^^^^^^^^
+Cleans up all of Spack's temporary and cached files. This can be used to
+recover disk space if temporary files from interrupted or failed installs
+accumulate in the staging area.
+
+When called with ``--stage`` or without arguments, this removes all staged
+files; it is equivalent to running ``spack clean`` for every package
+you have fetched or staged.
+
+When called with ``--downloads`` this will clear all resources
+:ref:`cached <caching>` during installs.
+
+When called with ``--user-cache`` this will remove caches in the user home
+directory, including cached virtual indices.
+
+To remove all of the above, the command can be called with ``--all``.
+
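+For example, staged files and downloaded archives can be cleared together
+(an illustrative invocation):
+
+.. code-block:: console
+
+   $ spack purge --stage --downloads
+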
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Keeping the stage directory on success
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
By default, ``spack install`` will delete the staging area once a
package has been successfully built and installed. Use
``--keep-stage`` to leave the build directory intact:
-.. code-block:: sh
+.. code-block:: console
- spack install --keep-stage <spec>
+ $ spack install --keep-stage <spec>
This allows you to inspect the build directory and potentially debug
the build. You can use ``purge`` or ``clean`` later to get rid of the
unwanted temporary files.
-
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Keeping the install prefix on failure
-~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
By default, ``spack install`` will delete any partially constructed
install prefix if anything fails during ``install()``. If you want to
keep the prefix anyway (e.g. to diagnose a bug), you can use
``--keep-prefix``:
-.. code-block:: sh
+.. code-block:: console
- spack install --keep-prefix <spec>
+ $ spack install --keep-prefix <spec>
Note that this may confuse Spack into thinking that the package has
-been installed properly, so you may need to use ``spack uninstall -f``
+been installed properly, so you may need to use ``spack uninstall --force``
to get rid of the install prefix before you build again:
-.. code-block:: sh
-
- spack uninstall -f <spec>
+.. code-block:: console
+ $ spack uninstall --force <spec>
+---------------------
Graphing dependencies
---------------------------
+---------------------
-.. _spack-graph:
+.. _cmd-spack-graph:
+^^^^^^^^^^^^^^^
``spack graph``
-~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^
Spack provides the ``spack graph`` command for graphing dependencies.
The command by default generates an ASCII rendering of a spec's
-dependency graph. For example::
-
- $ spack graph mpileaks
- o mpileaks
- |\
- | |\
- | o | callpath
- |/| |
- | |\|
- | |\ \
- | | |\ \
- | | | | o adept-utils
- | |_|_|/|
- |/| | | |
- o | | | | mpi
- / / / /
- | | o | dyninst
- | |/| |
- |/|/| |
- | | |/
- | o | libdwarf
- |/ /
- o | libelf
- /
- o boost
-
-At the top is the root package in the DAG, with dependency edges
-emerging from it. On a color terminal, the edges are colored by which
-dependency they lead to.
+dependency graph. For example:
+
+.. command-output:: spack graph mpileaks
+
+At the top is the root package in the DAG, with dependency edges emerging
+from it. On a color terminal, the edges are colored by which dependency
+they lead to.
+
+.. command-output:: spack graph --deptype=all mpileaks
+
+The ``deptype`` argument tells Spack what types of dependencies to graph.
+By default it includes link and run dependencies but not build
+dependencies. Supplying ``--deptype=all`` will show the build
+dependencies as well. This is equivalent to
+``--deptype=build,link,run``. Options for ``deptype`` include:
+
+* Any combination of ``build``, ``link``, and ``run`` separated by
+ commas.
+* ``all`` or ``alldeps`` for all types of dependencies.
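+
+For example, to graph build and link dependencies only (an illustrative
+invocation):
+
+.. code-block:: console
+
+   $ spack graph --deptype=build,link mpileaks
+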
You can also use ``spack graph`` to generate graphs in the widely used
`Dot <http://www.graphviz.org/doc/info/lang.html>`_ format. For
-example::
-
- $ spack graph --dot mpileaks
- digraph G {
- label = "Spack Dependencies"
- labelloc = "b"
- rankdir = "LR"
- ranksep = "5"
-
- "boost" [label="boost"]
- "callpath" [label="callpath"]
- "libdwarf" [label="libdwarf"]
- "mpileaks" [label="mpileaks"]
- "mpi" [label="mpi"]
- "adept-utils" [label="adept-utils"]
- "dyninst" [label="dyninst"]
- "libelf" [label="libelf"]
-
- "callpath" -> "dyninst"
- "callpath" -> "adept-utils"
- "callpath" -> "mpi"
- "callpath" -> "libelf"
- "callpath" -> "libdwarf"
- "libdwarf" -> "libelf"
- "mpileaks" -> "adept-utils"
- "mpileaks" -> "callpath"
- "mpileaks" -> "mpi"
- "adept-utils" -> "boost"
- "adept-utils" -> "mpi"
- "dyninst" -> "boost"
- "dyninst" -> "libelf"
- "dyninst" -> "libdwarf"
- }
+example:
+
+.. command-output:: spack graph --dot mpileaks
This graph can be provided as input to other graphing tools, such as
those in `Graphviz <http://www.graphviz.org>`_.
+.. _packaging-shell-support:
+
+-------------------------
Interactive shell support
---------------------------
+-------------------------
Spack provides some limited shell support to make life easier for
packagers. You can enable these commands by sourcing a setup file in
-the ``/share/spack`` directory. For ``bash`` or ``ksh``, run::
+the ``share/spack`` directory. For ``bash`` or ``ksh``, run:
- . $SPACK_ROOT/share/spack/setup-env.sh
+.. code-block:: sh
+
+ export SPACK_ROOT=/path/to/spack
+ . $SPACK_ROOT/share/spack/setup-env.sh
For ``csh`` and ``tcsh`` run:
- setenv SPACK_ROOT /path/to/spack
- source $SPACK_ROOT/share/spack/setup-env.csh
+.. code-block:: csh
+
+ setenv SPACK_ROOT /path/to/spack
+ source $SPACK_ROOT/share/spack/setup-env.csh
``spack cd`` will then be available.
-.. _spack-cd:
+.. _cmd-spack-cd:
+^^^^^^^^^^^^
``spack cd``
-~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^
``spack cd`` allows you to quickly cd to pertinent directories in Spack.
Suppose you've staged a package but you want to modify it before you
build it:
-.. code-block:: sh
+.. code-block:: console
$ spack stage libelf
==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
######################################################################## 100.0%
- ==> Staging archive: /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13.tar.gz
- ==> Created stage in /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64.
+ ==> Staging archive: ~/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64.
$ spack cd libelf
$ pwd
- /Users/gamblin2/src/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3=linux-ppc64/libelf-0.8.13
+ ~/spack/var/spack/stage/libelf@0.8.13%gcc@4.8.3 arch=linux-debian7-x86_64/libelf-0.8.13
-``spack cd`` here changed he current working directory to the
+``spack cd`` here changed the current working directory to the
directory containing the expanded ``libelf`` source code. There are a
number of other places you can cd to in the spack directory hierarchy:
-.. command-output:: spack cd -h
+.. command-output:: spack cd --help
Some of these change directory into package-specific locations (stage
directory, install directory, package directory) and others change to
-core spack locations. For example, ``spack cd -m`` will take you to
+core spack locations. For example, ``spack cd --module-dir`` will take you to
the main python source directory of your spack install.
-.. _spack-env:
+.. _cmd-spack-env:
+^^^^^^^^^^^^^
``spack env``
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^
``spack env`` functions much like the standard unix ``env`` command,
but it takes a spec as an argument. You can use it to see the
environment variables that will be set when a particular build runs,
for example:
-.. code-block:: sh
+.. code-block:: console
$ spack env mpileaks@1.1%intel
This will display the entire environment that will be set when the
``mpileaks@1.1%intel`` build runs.
-To run commands in a package's build environment, you can simply provided them after the spec argument to ``spack env``:
+To run commands in a package's build environment, you can simply
+provide them after the spec argument to ``spack env``:
-.. code-block:: sh
+.. code-block:: console
$ spack cd mpileaks@1.1%intel
$ spack env mpileaks@1.1%intel ./configure
@@ -2845,21 +3185,25 @@ To run commands in a package's build environment, you can simply provided them a
This will cd to the build directory and then run ``configure`` in the
package's build environment.
+.. _cmd-spack-location:
-.. _spack-location:
-
+^^^^^^^^^^^^^^^^^^
``spack location``
-~~~~~~~~~~~~~~~~~~~~~~
+^^^^^^^^^^^^^^^^^^
``spack location`` is the same as ``spack cd`` but it does not require
shell support. It simply prints out the path you ask for, rather than
-cd'ing to it. In bash, this::
+cd'ing to it. In bash, this:
+
+.. code-block:: console
- cd $(spack location -b <spec>)
+ $ cd $(spack location --build-dir <spec>)
-is the same as::
+is the same as:
- spack cd -b <spec>
+.. code-block:: console
+
+ $ spack cd --build-dir <spec>
``spack location`` is intended for use in scripts or makefiles that
need to know where packages are installed. e.g., in a makefile you
@@ -2867,6 +3211,117 @@ might write:
.. code-block:: makefile
- DWARF_PREFIX = $(spack location -i libdwarf)
+ DWARF_PREFIX = $(spack location --install-dir libdwarf)
CXXFLAGS += -I$DWARF_PREFIX/include
CXXFLAGS += -L$DWARF_PREFIX/lib
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Build System Configuration Support
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Imagine a developer creating a CMake or Autotools-based project in a
+local directory, which depends on libraries A-Z. Once Spack has
+installed those dependencies, one would like to run ``cmake`` with
+appropriate command line and environment so CMake can find them. The
+``spack setup`` command does this conveniently, producing a CMake
+configuration that is essentially the same as how Spack *would have*
+configured the project. This can be demonstrated with a usage
+example:
+
+.. code-block:: console
+
+ $ cd myproject
+ $ spack setup myproject@local
+ $ mkdir build; cd build
+ $ ../spconfig.py ..
+ $ make
+ $ make install
+
+Notes:
+
+* Spack must have ``myproject/package.py`` in its repository for
+ this to work.
+* ``spack setup`` produces the executable script ``spconfig.py`` in
+ the local directory, and also creates the module file for the
+ package. ``spconfig.py`` is normally run from the user's
+ out-of-source build directory.
+* The version number given to ``spack setup`` is arbitrary, just
+ like ``spack diy``. ``myproject/package.py`` does not need to
+ have any valid downloadable versions listed (typical when a
+ project is new).
+* ``spconfig.py`` produces a CMake configuration that *does not* use the
+ Spack wrappers. Any resulting binaries *will not* use RPATH,
+ unless the user has enabled it. This is recommended for
+ development purposes, not production.
+* ``spconfig.py`` is human readable, and can serve as a developer
+ reference of what dependencies are being used.
+* ``make install`` installs the package into the Spack repository,
+ where it may be used by other Spack packages.
+* CMake-generated makefiles re-run CMake in some circumstances. Use
+ of ``spconfig.py`` breaks this behavior, requiring the developer
+ to manually re-run ``spconfig.py`` when a ``CMakeLists.txt`` file
+ has changed.
+
+^^^^^^^^^^^^
+CMakePackage
+^^^^^^^^^^^^
+
+In order to enable ``spack setup`` functionality, the author of
+``myproject/package.py`` must subclass from ``CMakePackage`` instead
+of the standard ``Package`` superclass. Because CMake is
+standardized, the packager does not need to tell Spack how to run
+``cmake; make; make install``. Instead the packager only needs to
+create (optional) methods ``configure_args()`` and ``configure_env()``, which
+provide the arguments (as a list) and extra environment variables (as
+a dict) to provide to the ``cmake`` command. Usually, these will
+translate variant flags into CMake definitions. For example:
+
+.. code-block:: python
+
+   def configure_args(self):
+       spec = self.spec
+       return [
+           '-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
+           '-DBUILD_PYTHON=%s' % ('YES' if '+python' in spec else 'NO'),
+           '-DBUILD_GRIDGEN=%s' % ('YES' if '+gridgen' in spec else 'NO'),
+           '-DBUILD_COUPLER=%s' % ('YES' if '+coupler' in spec else 'NO'),
+           '-DUSE_PISM=%s' % ('YES' if '+pism' in spec else 'NO')
+       ]
+
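+A ``configure_env()`` method can supply extra environment variables in the
+same spirit (a sketch; the variable name and dependency are hypothetical):
+
+.. code-block:: python
+
+   def configure_env(self):
+       # Hypothetical: point the CMake build at a dependency via the environment.
+       return {'EVERYTRACE_ROOT': str(self.spec['everytrace'].prefix)}
+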
+If needed, a packager may also override methods defined in
+``StagedPackage`` (see below).
+
+^^^^^^^^^^^^^
+StagedPackage
+^^^^^^^^^^^^^
+
+``CMakePackage`` is implemented by subclassing the ``StagedPackage``
+superclass, which breaks down the standard ``Package.install()``
+method into several sub-stages: ``setup``, ``configure``, ``build``
+and ``install``. Details:
+
+* Instead of implementing the standard ``install()`` method, package
+ authors implement the methods for the sub-stages
+ ``install_setup()``, ``install_configure()``,
+ ``install_build()``, and ``install_install()``.
+
+* The ``spack install`` command runs the sub-stages ``configure``,
+ ``build`` and ``install`` in order. (The ``setup`` stage is
+ not run by default; see below).
+* The ``spack setup`` command runs the sub-stages ``setup``
+ and a dummy install (to create the module file).
+* The sub-stage install methods take no arguments (other than
+ ``self``). The arguments ``spec`` and ``prefix`` to the standard
+ ``install()`` method may be accessed via ``self.spec`` and
+ ``self.prefix``.
+
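+A minimal sketch of a package written against these sub-stages (class and
+option names are illustrative; an ``install_setup()`` method would be added
+to support ``spack setup``):
+
+.. code-block:: python
+
+   class Myproject(StagedPackage):
+       def install_configure(self):
+           configure('--prefix={0}'.format(self.prefix))
+
+       def install_build(self):
+           make()
+
+       def install_install(self):
+           make('install')
+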
+^^^^^^^^^^^^^
+GNU Autotools
+^^^^^^^^^^^^^
+
+The ``setup`` functionality is currently only available for
+CMake-based packages. Extending this functionality to GNU
+Autotools-based packages would be easy (and should be done by a
+developer who actively uses Autotools). Packages that use
+non-standard build systems can gain ``setup`` functionality by
+subclassing ``StagedPackage`` directly.
diff --git a/lib/spack/docs/repositories.rst b/lib/spack/docs/repositories.rst
new file mode 100644
index 0000000000..5e722e2139
--- /dev/null
+++ b/lib/spack/docs/repositories.rst
@@ -0,0 +1,456 @@
+.. _repositories:
+
+=============================
+Package Repositories
+=============================
+
+Spack comes with over 1,000 built-in package recipes in
+``var/spack/repos/builtin/``. This is a **package repository** -- a
+directory that Spack searches when it needs to find a package by name.
+You may need to maintain packages for restricted, proprietary or
+experimental software separately from the built-in repository. Spack
+allows you to configure local repositories using either the
+``repos.yaml`` or the ``spack repo`` command.
+
+A package repository is a directory structured like this::
+
+ repo/
+ repo.yaml
+ packages/
+ hdf5/
+ package.py
+ mpich/
+ package.py
+ mpich-1.9-bugfix.patch
+ trilinos/
+ package.py
+ ...
+
+The top-level ``repo.yaml`` file contains configuration metadata for the
+repository, and the ``packages`` directory contains subdirectories for
+each package in the repository. Each package directory contains a
+``package.py`` file and any patches or other files needed to build the
+package.
+
+Package repositories allow you to:
+
+1. Maintain your own packages separately from Spack;
+
+2. Share your packages (e.g. by hosting them in a shared file system),
+ without committing them to the built-in Spack package repository; and
+
+3. Override built-in Spack packages with your own implementation.
+
+Packages in a separate repository can also *depend on* built-in Spack
+packages. So, you can leverage existing recipes without re-implementing
+them in your own repository.
+
+---------------------
+``repos.yaml``
+---------------------
+
+Spack uses the ``repos.yaml`` file in ``~/.spack`` (and :ref:`elsewhere
+<configuration>`) to find repositories. Note that the ``repos.yaml``
+configuration file is distinct from the ``repo.yaml`` file in each
+repository. For more on the YAML format, and on how configuration file
+precedence works in Spack, see :ref:`configuration <configuration>`.
+
+The default ``etc/spack/defaults/repos.yaml`` file looks like this:
+
+.. code-block:: yaml
+
+ repos:
+ - $spack/var/spack/repos/builtin
+
+The file starts with ``repos:`` and contains a single ordered list of
+paths to repositories. Each path is on a separate line starting with
+``-``. You can add a repository by inserting another path into the list:
+
+.. code-block:: yaml
+
+ repos:
+ - /opt/local-repo
+ - $spack/var/spack/repos/builtin
+
+When Spack interprets a spec, e.g. ``mpich`` in ``spack install mpich``,
+it searches these repositories in order (first to last) to resolve each
+package name. In this example, Spack will look for the following
+packages and use the first valid file:
+
+1. ``/opt/local-repo/packages/mpich/package.py``
+2. ``$spack/var/spack/repos/builtin/packages/mpich/package.py``
+
+.. note::
+
+ Currently, Spack can only use repositories in the file system. We plan
+ to eventually support URLs in ``repos.yaml``, so that you can easily
+ point to remote package repositories, but that is not yet implemented.
+
+---------------------
+Namespaces
+---------------------
+
+Every repository in Spack has an associated **namespace** defined in its
+top-level ``repo.yaml`` file. If you look at
+``var/spack/repos/builtin/repo.yaml`` in the built-in repository, you'll
+see that its namespace is ``builtin``:
+
+.. code-block:: console
+
+   $ cat var/spack/repos/builtin/repo.yaml
+   repo:
+     namespace: builtin
+
+Spack records the repository namespace of each installed package. For
+example, if you install the ``mpich`` package from the ``builtin`` repo,
+Spack records its fully qualified name as ``builtin.mpich``. This
+accomplishes two things:
+
+1. You can have packages with the same name from different namespaces
+ installed at once.
+
+2. You can easily determine which repository a package came from after it
+ is installed (more :ref:`below <namespace-example>`).
+
+.. note::
+
+ It may seem redundant for a repository to have both a namespace and a
+ path, but repository *paths* may change over time, or, as mentioned
+ above, a locally hosted repository path may eventually be hosted at
+ some remote URL.
+
+ Namespaces are designed to allow *package authors* to associate a
+ unique identifier with their packages, so that the package can be
+ identified even if the repository moves. This is why the namespace is
+ determined by the ``repo.yaml`` file in the repository rather than the
+ local ``repos.yaml`` configuration: the *repository maintainer* sets
+ the name.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Uniqueness
+^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+You should choose a namespace that uniquely identifies your package
+repository. For example, if you make a repository for packages written
+by your organization, you could use your organization's name. You can
+also nest namespaces using periods, so you could identify a repository by
+a sub-organization. For example, LLNL might use the ``llnl`` namespace for
+its internal repositories. Packages from the Physical & Life
+Sciences directorate (PLS) might use the ``llnl.pls`` namespace, and
+packages created by the Computation directorate might use ``llnl.comp``.
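+
+For instance, a repository maintained by the Computation directorate might
+declare its nested namespace in ``repo.yaml`` like this (a hypothetical
+example, following the ``repo.yaml`` format shown earlier):
+
+.. code-block:: yaml
+
+   repo:
+     namespace: llnl.comp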
+
+Spack cannot ensure that every repository is named uniquely, but it will
+prevent you from registering two repositories with the same namespace at
+the same time. If you try to add a repository that has the same name as
+an existing one, e.g. ``builtin``, Spack will print a warning message.
+
+.. _namespace-example:
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Namespace example
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Suppose that LLNL maintains its own version of ``mpich``, separate from
+Spack's built-in ``mpich`` package, and suppose you've installed both
+LLNL's and Spack's ``mpich`` packages. If you just use ``spack find``,
+you won't see a difference between these two packages:
+
+.. code-block:: console
+
+ $ spack find
+ ==> 2 installed packages.
+ -- linux-rhel6-x86_64 / gcc@4.4.7 -------------
+ mpich@3.2 mpich@3.2
+
+However, if you use ``spack find -N``, Spack will display the packages
+with their namespaces:
+
+.. code-block:: console
+
+ $ spack find -N
+ ==> 2 installed packages.
+ -- linux-rhel6-x86_64 / gcc@4.4.7 -------------
+ builtin.mpich@3.2 llnl.comp.mpich@3.2
+
+Now you know which one is LLNL's special version, and which one is the
+built-in Spack package. As you might guess, packages that are identical
+except for their namespace will still have different hashes:
+
+.. code-block:: console
+
+ $ spack find -lN
+ ==> 2 installed packages.
+ -- linux-rhel6-x86_64 / gcc@4.4.7 -------------
+ c35p3gc builtin.mpich@3.2 itoqmox llnl.comp.mpich@3.2
+
+All Spack commands that take a package :ref:`spec <sec-specs>` can also
+accept a fully qualified spec with a namespace. This means you can use
+the namespace to be more specific when designating, e.g., which package
+you want to uninstall:
+
+.. code-block:: console
+
+   $ spack uninstall llnl.comp.mpich
+
+----------------------------
+Overriding built-in packages
+----------------------------
+
+Spack's search semantics mean that you can make your own implementation
+of a built-in Spack package (like ``mpich``), put it in a repository, and
+use it to override the built-in package. As long as the repository
+containing your ``mpich`` comes before any other in ``repos.yaml``, any
+built-in package that depends on ``mpich`` will use the one in your
+repository.
+
+Suppose you have three repositories: the builtin Spack repo
+(``builtin``), a shared repo for your institution (e.g., ``llnl``), and a
+repo containing your own prototype packages (``proto``). Suppose they
+contain packages as follows:
+
+ +--------------+------------------------------------+-----------------------------+
+ | Namespace | Path to repo | Packages |
+ +==============+====================================+=============================+
+ | ``proto`` | ``~/proto`` | ``mpich`` |
+ +--------------+------------------------------------+-----------------------------+
+ | ``llnl`` | ``/usr/local/llnl`` | ``hdf5`` |
+ +--------------+------------------------------------+-----------------------------+
+ | ``builtin`` | ``$spack/var/spack/repos/builtin`` | ``mpich``, ``hdf5``, others |
+ +--------------+------------------------------------+-----------------------------+
+
+Suppose that ``hdf5`` depends on ``mpich``. You can override the
+built-in ``hdf5`` by adding the ``llnl`` repo to ``repos.yaml``:
+
+.. code-block:: yaml
+
+ repos:
+ - /usr/local/llnl
+ - $spack/var/spack/repos/builtin
+
+``spack install hdf5`` will install ``llnl.hdf5 ^builtin.mpich``.
+
+If, instead, ``repos.yaml`` looks like this:
+
+.. code-block:: yaml
+
+ repos:
+ - ~/proto
+ - /usr/local/llnl
+ - $spack/var/spack/repos/builtin
+
+``spack install hdf5`` will install ``llnl.hdf5 ^proto.mpich``.
+
+Any unqualified package name will be resolved by searching ``repos.yaml``
+from the first entry to the last. You can force a particular
+repository's package by using a fully qualified name. For example, if
+your ``repos.yaml`` is as above, and you want ``builtin.mpich`` instead
+of ``proto.mpich``, you can write::
+
+ spack install hdf5 ^builtin.mpich
+
+which will install ``llnl.hdf5 ^builtin.mpich``.
+
+Similarly, you can force the ``builtin.hdf5`` like this::
+
+ spack install builtin.hdf5 ^builtin.mpich
+
+This will not search ``repos.yaml`` at all, as the ``builtin`` repo is
+specified in both cases. It will install ``builtin.hdf5
+^builtin.mpich``.
+
+If you want to see which repositories will be used in a build *before*
+you install it, you can use ``spack spec -N``:
+
+.. code-block:: console
+
+ $ spack spec -N hdf5
+ Input spec
+ --------------------------------
+ hdf5
+
+ Normalized
+ --------------------------------
+ hdf5
+ ^zlib@1.1.2:
+
+ Concretized
+ --------------------------------
+ builtin.hdf5@1.10.0-patch1%clang@7.0.2-apple+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=darwin-elcapitan-x86_64
+ ^builtin.openmpi@2.0.1%clang@7.0.2-apple~mxm~pmi~psm~psm2~slurm~sqlite3~thread_multiple~tm~verbs+vt arch=darwin-elcapitan-x86_64
+ ^builtin.hwloc@1.11.4%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
+ ^builtin.libpciaccess@0.13.4%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
+ ^builtin.libtool@2.4.6%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
+ ^builtin.m4@1.4.17%clang@7.0.2-apple+sigsegv arch=darwin-elcapitan-x86_64
+ ^builtin.libsigsegv@2.10%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
+ ^builtin.pkg-config@0.29.1%clang@7.0.2-apple+internal_glib arch=darwin-elcapitan-x86_64
+ ^builtin.util-macros@1.19.0%clang@7.0.2-apple arch=darwin-elcapitan-x86_64
+ ^builtin.zlib@1.2.8%clang@7.0.2-apple+pic arch=darwin-elcapitan-x86_64
+
+.. warning::
+
+ You *can* use a fully qualified package name in a ``depends_on``
+ directive in a ``package.py`` file, like so::
+
+ depends_on('proto.hdf5')
+
+ This is *not* recommended, as it makes it very difficult for
+ multiple repos to be composed and shared. A ``package.py`` like this
+ will fail if the ``proto`` repository is not registered in
+ ``repos.yaml``.
+
+.. _cmd-spack-repo:
+
+--------------------------
+``spack repo``
+--------------------------
+
+Spack's :ref:`configuration system <configuration>` allows repository
+settings to come from ``repos.yaml`` files in many locations. If you
+want to see the repositories registered as a result of all configuration
+files, use ``spack repo list``.
+
+^^^^^^^^^^^^^^^^^^^
+``spack repo list``
+^^^^^^^^^^^^^^^^^^^
+
+.. code-block:: console
+
+ $ spack repo list
+ ==> 2 package repositories.
+ myrepo ~/myrepo
+ builtin ~/spack/var/spack/repos/builtin
+
+Each repository is listed with its associated namespace. To get the raw,
+merged YAML from all configuration files, use ``spack config get repos``:
+
+.. code-block:: console
+
+ $ spack config get repos
+   repos:
+ - ~/myrepo
+ - $spack/var/spack/repos/builtin
+
+Note that, unlike ``spack repo list``, this does not include the
+namespace, which is read from each repo's ``repo.yaml``.
+
+^^^^^^^^^^^^^^^^^^^^^
+``spack repo create``
+^^^^^^^^^^^^^^^^^^^^^
+
+To make your own repository, you don't need to construct a directory
+yourself; you can use the ``spack repo create`` command.
+
+.. code-block:: console
+
+ $ spack repo create myrepo
+ ==> Created repo with namespace 'myrepo'.
+ ==> To register it with spack, run this command:
+ spack repo add ~/myrepo
+
+ $ ls myrepo
+ packages/ repo.yaml
+
+ $ cat myrepo/repo.yaml
+ repo:
+ namespace: 'myrepo'
+
+By default, the namespace of a new repo matches its directory's name.
+You can supply a custom namespace with a second argument, e.g.:
+
+.. code-block:: console
+
+ $ spack repo create myrepo llnl.comp
+ ==> Created repo with namespace 'llnl.comp'.
+ ==> To register it with spack, run this command:
+ spack repo add ~/myrepo
+
+ $ cat myrepo/repo.yaml
+ repo:
+ namespace: 'llnl.comp'
+
+^^^^^^^^^^^^^^^^^^
+``spack repo add``
+^^^^^^^^^^^^^^^^^^
+
+Once your repository is created, you can register it with Spack with
+``spack repo add``:
+
+.. code-block:: console
+
+ $ spack repo add ./myrepo
+ ==> Added repo with namespace 'llnl.comp'.
+
+ $ spack repo list
+ ==> 2 package repositories.
+ llnl.comp ~/myrepo
+ builtin ~/spack/var/spack/repos/builtin
+
+This simply adds the repo to your ``repos.yaml`` file.
+
+Once a repository is registered like this, you should be able to see its
+packages' names in the output of ``spack list``, and you should be able
+to build them using ``spack install <name>`` as you would with any
+built-in package.
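+
+For example, assuming your new repository contains a hypothetical package
+named ``mypackage``, you might check for it and build it like this:
+
+.. code-block:: console
+
+   $ spack list mypackage
+   $ spack install mypackage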
+
+^^^^^^^^^^^^^^^^^^^^^
+``spack repo remove``
+^^^^^^^^^^^^^^^^^^^^^
+
+You can remove an already-registered repository with ``spack repo rm``.
+This will work whether you pass the repository's namespace *or* its
+path.
+
+By namespace:
+
+.. code-block:: console
+
+ $ spack repo rm llnl.comp
+ ==> Removed repository ~/myrepo with namespace 'llnl.comp'.
+
+ $ spack repo list
+ ==> 1 package repository.
+ builtin ~/spack/var/spack/repos/builtin
+
+By path:
+
+.. code-block:: console
+
+ $ spack repo rm ~/myrepo
+ ==> Removed repository ~/myrepo
+
+ $ spack repo list
+ ==> 1 package repository.
+ builtin ~/spack/var/spack/repos/builtin
+
+--------------------------------
+Repo namespaces and Python
+--------------------------------
+
+You may have noticed that namespace notation for repositories is similar
+to the notation for namespaces in Python. As it turns out, you *can*
+treat Spack repositories like Python packages; this is how they are
+implemented.
+
+You could, for example, extend a ``builtin`` package in your own
+repository:
+
+.. code-block:: python
+
+ from spack.pkg.builtin.mpich import Mpich
+
+ class MyPackage(Mpich):
+ ...
+
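+As a purely illustrative sketch (the extra variant below is hypothetical),
+a ``packages/mpich/package.py`` in your own repository could reuse the
+built-in recipe wholesale and just layer site-specific additions on top:
+
+.. code-block:: python
+
+   from spack import *
+   # Import the built-in recipe under an alias so the subclass below can
+   # keep the ``Mpich`` class name Spack expects for a package named mpich.
+   from spack.pkg.builtin.mpich import Mpich as BuiltinMpich
+
+
+   class Mpich(BuiltinMpich):
+       """Site-specific mpich that inherits the built-in recipe."""
+
+       # Hypothetical extra variant layered on top of the inherited ones.
+       variant('sitepatches', default=False,
+               description='Apply site-local patches')
+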
+Spack repo namespaces are actually Python namespaces tacked on under
+``spack.pkg``. The search semantics of ``repos.yaml`` are implemented
+using Python's built-in `sys.path
+<https://docs.python.org/2/library/sys.html#sys.path>`_ search. The
+:py:mod:`spack.repository` module implements a custom `Python importer
+<https://docs.python.org/2/library/imp.html>`_.
+
+.. warning::
+
+ The mechanism for extending packages is not yet extensively tested,
+ and extending packages across repositories imposes inter-repo
+ dependencies, which may be hard to manage. Use this feature at your
+ own risk, but let us know if you have a use case for it.
diff --git a/lib/spack/docs/site_configuration.rst b/lib/spack/docs/site_configuration.rst
deleted file mode 100644
index 3abfa21a9d..0000000000
--- a/lib/spack/docs/site_configuration.rst
+++ /dev/null
@@ -1,173 +0,0 @@
-.. _site-configuration:
-
-Site configuration
-===================================
-
-.. _temp-space:
-
-Temporary space
-----------------------------
-
-.. warning:: Temporary space configuration will eventually be moved to
- configuration files, but currently these settings are in
- ``lib/spack/spack/__init__.py``
-
-By default, Spack will try to do all of its building in temporary
-space. There are two main reasons for this. First, Spack is designed
-to run out of a user's home directory, and on many systems the home
-directory is network mounted and potentially not a very fast
-filesystem. We create build stages in a temporary directory to avoid
-this. Second, many systems impose quotas on home directories, and
-``/tmp`` or similar directories often have more available space. This
-helps conserve space for installations in users' home directories.
-
-You can customize temporary directories by editing
-``lib/spack/spack/__init__.py``. Specifically, find this part of the file:
-
-.. code-block:: python
-
- # Whether to build in tmp space or directly in the stage_path.
- # If this is true, then spack will make stage directories in
- # a tmp filesystem, and it will symlink them into stage_path.
- use_tmp_stage = True
-
- # Locations to use for staging and building, in order of preference
- # Use a %u to add a username to the stage paths here, in case this
- # is a shared filesystem. Spack will use the first of these paths
- # that it can create.
- tmp_dirs = ['/nfs/tmp2/%u/spack-stage',
- '/var/tmp/%u/spack-stage',
- '/tmp/%u/spack-stage']
-
-The ``use_tmp_stage`` variable controls whether Spack builds
-**directly** inside the ``var/spack/`` directory. Normally, Spack
-will try to find a temporary directory for a build, then it *symlinks*
-that temporary directory into ``var/spack/`` so that you can keep
-track of what temporary directories Spack is using.
-
-The ``tmp_dirs`` variable is a list of paths Spack should search when
-trying to find a temporary directory. They can optionally contain a
-``%u``, which will substitute the current user's name into the path.
-The list is searched in order, and Spack will create a temporary stage
-in the first directory it finds to which it has write access. Add
-more elements to the list to indicate where your own site's temporary
-directory is.
-
-
-External Packages
-~~~~~~~~~~~~~~~~~~~~~
-Spack can be configured to use externally-installed
-packages rather than building its own packages. This may be desirable
-if machines ship with system packages, such as a customized MPI
-that should be used instead of Spack building its own MPI.
-
-External packages are configured through the ``packages.yaml`` file found
-in a Spack installation's ``etc/spack/`` or a user's ``~/.spack/``
-directory. Here's an example of an external configuration:
-
-.. code-block:: yaml
-
- packages:
- openmpi:
- paths:
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
-
-This example lists three installations of OpenMPI, one built with gcc,
-one built with gcc and debug information, and another built with Intel.
-If Spack is asked to build a package that uses one of these MPIs as a
-dependency, it will use the pre-installed OpenMPI in
-the given directory.
-
-Each ``packages.yaml`` begins with a ``packages:`` token, followed
-by a list of package names. To specify externals, add a ``paths``
-token under the package name, which lists externals in a
-``spec : /path`` format. Each spec should be as
-well-defined as reasonably possible. If a
-package lacks a spec component, such as missing a compiler or
-package version, then Spack will guess the missing component based
-on its most-favored packages, and it may guess incorrectly.
-
-Each package version and compiler listed in an external should
-have entries in Spack's packages and compiler configuration, even
-though the package and compiler may never actually be built.
-
-The packages configuration can tell Spack to use an external location
-for certain package versions, but it does not restrict Spack to using
-external packages. In the above example, if an OpenMPI 1.8.4 became
-available Spack may choose to start building and linking with that version
-rather than continue using the pre-installed OpenMPI versions.
-
-To prevent this, the ``packages.yaml`` configuration also allows packages
-to be flagged as non-buildable. The previous example could be modified to
-be:
-
-.. code-block:: yaml
-
- packages:
- openmpi:
- paths:
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib: /opt/openmpi-1.4.3
- openmpi@1.4.3%gcc@4.4.7=chaos_5_x86_64_ib+debug: /opt/openmpi-1.4.3-debug
- openmpi@1.6.5%intel@10.1=chaos_5_x86_64_ib: /opt/openmpi-1.6.5-intel
- buildable: False
-
-The addition of the ``buildable`` flag tells Spack that it should never build
-its own version of OpenMPI, and it will instead always rely on a pre-built
-OpenMPI. Similar to ``paths``, ``buildable`` is specified as a property under
-a package name.
-
-The ``buildable`` flag does not need to be paired with external packages.
-It can also be used alone to forbid packages that may be
-buggy or otherwise undesirable.
-
-
-Profiling
-~~~~~~~~~~~~~~~~~~~~~
-
-Spack has some limited built-in support for profiling, and can report
-statistics using standard Python timing tools. To use this feature,
-supply ``-p`` to Spack on the command line, before any subcommands.
-
-.. _spack-p:
-
-``spack -p``
-^^^^^^^^^^^^^^^^^^
-
-``spack -p`` output looks like this:
-
-.. code-block:: sh
-
- $ spack -p graph dyninst
- o dyninst
- |\
- | |\
- | o | libdwarf
- |/ /
- o | libelf
- /
- o boost
-
- 307670 function calls (305943 primitive calls) in 0.127 seconds
-
- Ordered by: internal time
-
- ncalls tottime percall cumtime percall filename:lineno(function)
- 853 0.021 0.000 0.066 0.000 inspect.py:472(getmodule)
- 51197 0.011 0.000 0.018 0.000 inspect.py:51(ismodule)
- 73961 0.010 0.000 0.010 0.000 {isinstance}
- 1762 0.006 0.000 0.053 0.000 inspect.py:440(getsourcefile)
- 32075 0.006 0.000 0.006 0.000 {hasattr}
- 1760 0.004 0.000 0.004 0.000 {posix.stat}
- 2240 0.004 0.000 0.004 0.000 {posix.lstat}
- 2602 0.004 0.000 0.011 0.000 inspect.py:398(getfile)
- 771 0.004 0.000 0.077 0.000 inspect.py:518(findsource)
- 2656 0.004 0.000 0.004 0.000 {method 'match' of '_sre.SRE_Pattern' objects}
- 30772 0.003 0.000 0.003 0.000 {method 'get' of 'dict' objects}
- ...
-
-The bottom of the output shows the most time-consuming functions,
-slowest first. The profiling support is from Python's built-in tool,
-`cProfile
-<https://docs.python.org/2/library/profile.html#module-cProfile>`_.
diff --git a/lib/spack/docs/tutorial/examples/0.package.py b/lib/spack/docs/tutorial/examples/0.package.py
new file mode 100644
index 0000000000..7ff04d8f17
--- /dev/null
+++ b/lib/spack/docs/tutorial/examples/0.package.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# This is a template package file for Spack. We've put "FIXME"
+# next to all the things you'll want to change. Once you've handled
+# them, you can save this file and test your package like this:
+#
+# spack install mpileaks
+#
+# You can edit this file again by typing:
+#
+# spack edit mpileaks
+#
+# See the Spack documentation for more information on packaging.
+# If you submit this package back to Spack as a pull request,
+# please first remove this boilerplate and all FIXME comments.
+#
+from spack import *
+
+
+class Mpileaks(AutotoolsPackage):
+ """FIXME: Put a proper description of your package here."""
+
+ # FIXME: Add a proper url for your package's homepage here.
+ homepage = "http://www.example.com"
+ url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
+
+ version('1.0', '8838c574b39202a57d7c2d68692718aa')
+
+ # FIXME: Add dependencies if required.
+ # depends_on('m4', type='build')
+ # depends_on('autoconf', type='build')
+ # depends_on('automake', type='build')
+ # depends_on('libtool', type='build')
+ # depends_on('foo')
+
+ def configure_args(self):
+ # FIXME: Add arguments other than --prefix
+ # FIXME: If not needed delete the function
+ args = []
+ return args
diff --git a/lib/spack/docs/tutorial/examples/1.package.py b/lib/spack/docs/tutorial/examples/1.package.py
new file mode 100644
index 0000000000..ed156fb34b
--- /dev/null
+++ b/lib/spack/docs/tutorial/examples/1.package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mpileaks(AutotoolsPackage):
+ """Tool to detect and report MPI objects like MPI_Requests and
+ MPI_Datatypes."""
+
+ homepage = "https://github.com/hpc/mpileaks"
+ url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
+
+ version('1.0', '8838c574b39202a57d7c2d68692718aa')
+
+ # FIXME: Add dependencies if required.
+ # depends_on('m4', type='build')
+ # depends_on('autoconf', type='build')
+ # depends_on('automake', type='build')
+ # depends_on('libtool', type='build')
+ # depends_on('foo')
+
+ def configure_args(self):
+ # FIXME: Add arguments other than --prefix
+ # FIXME: If not needed delete the function
+ args = []
+ return args
diff --git a/lib/spack/docs/tutorial/examples/2.package.py b/lib/spack/docs/tutorial/examples/2.package.py
new file mode 100644
index 0000000000..93274cb587
--- /dev/null
+++ b/lib/spack/docs/tutorial/examples/2.package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mpileaks(AutotoolsPackage):
+ """Tool to detect and report MPI objects like MPI_Requests and
+ MPI_Datatypes."""
+
+ homepage = "https://github.com/hpc/mpileaks"
+ url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
+
+ version('1.0', '8838c574b39202a57d7c2d68692718aa')
+
+ depends_on('mpi')
+ depends_on('adept-utils')
+ depends_on('callpath')
+
+ def configure_args(self):
+ # FIXME: Add arguments other than --prefix
+ # FIXME: If not needed delete the function
+ args = []
+ return args
diff --git a/lib/spack/docs/tutorial/examples/3.package.py b/lib/spack/docs/tutorial/examples/3.package.py
new file mode 100644
index 0000000000..e732a7187d
--- /dev/null
+++ b/lib/spack/docs/tutorial/examples/3.package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mpileaks(AutotoolsPackage):
+ """Tool to detect and report MPI objects like MPI_Requests and
+ MPI_Datatypes."""
+ homepage = "https://github.com/hpc/mpileaks"
+ url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
+
+ version('1.0', '8838c574b39202a57d7c2d68692718aa')
+
+ depends_on('mpi')
+ depends_on('adept-utils')
+ depends_on('callpath')
+
+ def configure_args(self):
+ args = ['--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
+ '--with-callpath=%s' % self.spec['callpath'].prefix]
+ return args
diff --git a/lib/spack/docs/tutorial/examples/4.package.py b/lib/spack/docs/tutorial/examples/4.package.py
new file mode 100644
index 0000000000..8f3fae37ed
--- /dev/null
+++ b/lib/spack/docs/tutorial/examples/4.package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mpileaks(AutotoolsPackage):
+ """Tool to detect and report MPI objects like MPI_Requests and
+ MPI_Datatypes."""
+
+ homepage = "https://github.com/hpc/mpileaks"
+ url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
+
+ version('1.0', '8838c574b39202a57d7c2d68692718aa')
+
+ variant('stackstart', default=0, description='Specify the number of stack frames to truncate.')
+
+ depends_on('mpi')
+ depends_on('adept-utils')
+ depends_on('callpath')
+
+ def configure_args(self):
+ args = ['--with-adept-utils=%s' % self.spec['adept-utils'].prefix,
+ '--with-callpath=%s' % self.spec['callpath'].prefix]
+ stackstart = int(self.spec.variants['stackstart'].value)
+ if stackstart:
+ args.extend(['--with-stack-start-c=%s' % stackstart,
+ '--with-stack-start-fortran=%s' % stackstart])
+ return args
diff --git a/lib/spack/docs/tutorial/sc16-tutorial-slide-preview.png b/lib/spack/docs/tutorial/sc16-tutorial-slide-preview.png
new file mode 100644
index 0000000000..e7f9b2e323
--- /dev/null
+++ b/lib/spack/docs/tutorial/sc16-tutorial-slide-preview.png
Binary files differ
diff --git a/lib/spack/docs/tutorial_sc16.rst b/lib/spack/docs/tutorial_sc16.rst
new file mode 100644
index 0000000000..a95eee989c
--- /dev/null
+++ b/lib/spack/docs/tutorial_sc16.rst
@@ -0,0 +1,48 @@
+.. _spack-101:
+
+=============================
+Tutorial: Spack 101
+=============================
+
+This is a 3-hour introduction to Spack with lectures and live demos. It
+was presented as a tutorial at `Supercomputing 2016
+<http://sc16.supercomputing.org>`_. You can use these materials to teach
+a course on Spack at your own site, or you can just skip ahead and read
+the live demo scripts to see how Spack is used in practice.
+
+.. _sc16-slides:
+
+.. rubric:: Slides
+
+.. figure:: tutorial/sc16-tutorial-slide-preview.png
+ :target: http://llnl.github.io/spack/files/Spack-SC16-Tutorial.pdf
+ :height: 72px
+ :align: left
+ :alt: Slide Preview
+
+`Download Slides <http://llnl.github.io/spack/files/Spack-SC16-Tutorial.pdf>`_.
+
+**Full citation:** Todd Gamblin, Massimiliano Culpo, Gregory Becker, Matt
+Legendre, Greg Lee, Elizabeth Fischer, and Benedikt Hegner.
+`Managing HPC Software Complexity with Spack
+<http://sc16.supercomputing.org/presentation/?id=tut166&sess=sess209>`_.
+Tutorial presented at Supercomputing 2016. November 13, 2016, Salt Lake
+City, UT, USA.
+
+.. _sc16-live-demos:
+
+.. rubric:: Live Demos
+
+These scripts will take you step-by-step through basic Spack tasks. They
+correspond to sections in the slides above.
+
+ 1. :ref:`basics-tutorial`
+ 2. :ref:`packaging-tutorial`
+ 3. :ref:`modules-tutorial`
+
+Full contents:
+
+.. toctree::
+ tutorial_sc16_spack_basics
+ tutorial_sc16_packaging
+ tutorial_sc16_modules
diff --git a/lib/spack/docs/tutorial_sc16_modules.rst b/lib/spack/docs/tutorial_sc16_modules.rst
new file mode 100644
index 0000000000..407f679ae6
--- /dev/null
+++ b/lib/spack/docs/tutorial_sc16_modules.rst
@@ -0,0 +1,982 @@
+.. _modules-tutorial:
+
+=============================
+Module Configuration Tutorial
+=============================
+
+This tutorial will guide you through the customization of both
+content and naming of module files generated by Spack.
+
+Starting from the default Spack settings you will add an increasing
+number of directives to the ``modules.yaml`` configuration file to
+satisfy a number of constraints that mimic those that you may encounter
+in a typical production environment at HPC sites.
+
+Even though the focus will mostly be on customizing non-hierarchical TCL
+module files, everything you'll see also applies to the other kinds of
+module files generated by Spack.
+
+The generation of hierarchical Lua module files will be addressed at the
+end of the tutorial, where you'll see that a few minor modifications to an
+existing ``modules.yaml`` written for non-hierarchical TCL modules are
+enough to try a hierarchical layout almost for free.
+
+Let's start!
+
+.. _module_file_tutorial_prerequisites:
+
+-------------
+Prerequisites
+-------------
+
+Before proceeding further, ensure that:
+
+- you have Lmod or Environment Modules available
+- you have :ref:`shell support <shell-support>` activated in Spack
+
+If you need to install Lmod or Environment Modules, you can refer
+to the documentation :ref:`here <InstallEnvironmentModules>`.
+
+
+^^^^^^^^^^^^^^^^^^
+Add a new compiler
+^^^^^^^^^^^^^^^^^^
+
+Spack automatically scans the environment for available compilers on
+first use. On an Ubuntu 14.04 machine, a fresh clone will show
+something like this:
+
+.. code-block:: console
+
+ $ uname -a
+ Linux nuvolari 4.4.0-45-generic #66~14.04.1-Ubuntu SMP Wed Oct 19 15:05:38 UTC 2016 x86_64 x86_64 x86_64 GNU/Linux
+
+ $ spack compilers
+ ==> Available compilers
+ -- gcc ----------------------------------------------------------
+ gcc@4.8
+
+To build a limited set of packages with features that will help showcase
+the capabilities of module customization, the first thing we need to do
+is build a new compiler:
+
+.. code-block:: console
+
+ $ spack install gcc@6.2.0
+ # ...
+ # Wait a long time
+ # ...
+
+Then we can use shell support for modules to add it to the list of known compilers:
+
+.. code-block:: console
+
+ # The name of the generated module may vary
+ $ module load gcc-6.2.0-gcc-4.8-twd5nqg
+
+ $ spack compiler add
+ ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
+ gcc@6.2.0
+
+ $ spack compilers
+ ==> Available compilers
+ -- gcc ----------------------------------------------------------
+ gcc@6.2.0 gcc@4.8
+
+Note that the 7-digit hash at the end of the generated module name may
+vary depending on the architecture or package version.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Build software that will be used in the tutorial
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Next, you should install a few packages that will be used in the tutorial:
+
+.. code-block:: console
+
+ $ spack install netlib-scalapack ^openmpi ^openblas
+ # ...
+
+The packages you need to install are:
+
+- ``netlib-scalapack ^openmpi ^openblas``
+- ``netlib-scalapack ^mpich ^openblas``
+- ``netlib-scalapack ^openmpi ^netlib-lapack``
+- ``netlib-scalapack ^mpich ^netlib-lapack``
+- ``py-scipy ^openblas``
+
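+For reference, the remaining packages can be installed with the same
+pattern as the command above; a sketch of the corresponding commands
+(build output omitted) is:
+
+.. code-block:: console
+
+   $ spack install netlib-scalapack ^mpich ^openblas
+   $ spack install netlib-scalapack ^openmpi ^netlib-lapack
+   $ spack install netlib-scalapack ^mpich ^netlib-lapack
+   $ spack install py-scipy ^openblas
+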
+In the end your environment should look something like:
+
+.. code-block:: console
+
+ $ module avail
+
+ ------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
+ binutils-2.27-gcc-4.8-dz3xevw libpciaccess-0.13.4-gcc-6.2.0-eo2siet lzo-2.09-gcc-6.2.0-jcngz72 netlib-scalapack-2.0.2-gcc-6.2.0-wnimqhw python-2.7.12-gcc-6.2.0-qu7rc5p
+ bzip2-1.0.6-gcc-6.2.0-csoc2mq libsigsegv-2.10-gcc-4.8-avb6azw m4-1.4.17-gcc-4.8-iggewke netlib-scalapack-2.0.2-gcc-6.2.0-wojunhq sqlite-3.8.5-gcc-6.2.0-td3zfe7
+ cmake-3.5.2-gcc-6.2.0-6poypqg libsigsegv-2.10-gcc-6.2.0-g3qpmbi m4-1.4.17-gcc-6.2.0-lhgqa6s nettle-3.2-gcc-6.2.0-djdthlh tcl-8.6.5-gcc-4.8-atddxu7
+ curl-7.50.3-gcc-6.2.0-2ffacqm libtool-2.4.6-gcc-6.2.0-kiepac6 mpc-1.0.3-gcc-4.8-lylv7lk openblas-0.2.19-gcc-6.2.0-js33umc util-macros-1.19.0-gcc-6.2.0-uoukuqk
+ expat-2.2.0-gcc-6.2.0-bxqnjar libxml2-2.9.4-gcc-6.2.0-3k4ykbe mpfr-3.1.4-gcc-4.8-bldfx3w openmpi-2.0.1-gcc-6.2.0-s3qbtby xz-5.2.2-gcc-6.2.0-t5lk6in
+ gcc-6.2.0-gcc-4.8-twd5nqg lmod-6.4.5-gcc-4.8-7v7bh7b mpich-3.2-gcc-6.2.0-5n5xoep openssl-1.0.2j-gcc-6.2.0-hibnfda zlib-1.2.8-gcc-4.8-bds4ies
+ gmp-6.1.1-gcc-4.8-uq52e2n lua-5.3.2-gcc-4.8-xozf2hx ncurses-6.0-gcc-4.8-u62fit4 pkg-config-0.29.1-gcc-6.2.0-rslsgcs zlib-1.2.8-gcc-6.2.0-asydrba
+ gmp-6.1.1-gcc-6.2.0-3cfh3hi lua-luafilesystem-1_6_3-gcc-4.8-sbzejlz ncurses-6.0-gcc-6.2.0-7tb426s py-nose-1.3.7-gcc-6.2.0-4gl5c42
+ hwloc-1.11.4-gcc-6.2.0-3ostwel lua-luaposix-33.4.0-gcc-4.8-xf7y2p5 netlib-lapack-3.6.1-gcc-6.2.0-mirer2l py-numpy-1.11.1-gcc-6.2.0-i3rpk4e
+ isl-0.14-gcc-4.8-cq73t5m lz4-131-gcc-6.2.0-cagoem4 netlib-scalapack-2.0.2-gcc-6.2.0-6bqlxqy py-scipy-0.18.1-gcc-6.2.0-e6uljfi
+ libarchive-3.2.1-gcc-6.2.0-2b54aos lzma-4.32.7-gcc-6.2.0-sfmeynw netlib-scalapack-2.0.2-gcc-6.2.0-hpqb3dp py-setuptools-25.2.0-gcc-6.2.0-hkqauaa
+
+------------------------------------------------
+Filter unwanted modifications to the environment
+------------------------------------------------
+
+The non-hierarchical TCL module files that have been generated so far
+follow the default rules for module generation, which are given
+:ref:`here <modules-yaml>` in the reference part of the manual. Taking a
+look at the ``gcc`` module you'll see something like:
+
+.. code-block:: console
+
+ $ module show gcc-6.2.0-gcc-4.8-twd5nqg
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc-6.2.0-gcc-4.8-twd5nqg:
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ whatis("gcc @6.2.0 ")
+ prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
+ prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
+ prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
+ prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
+ prepend_path("LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
+ prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
+ prepend_path("CPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/include")
+ help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
+ Fortran, and Java.
+ ]])
+
+As expected, a few environment variables representing paths will be modified
+by the modules according to the default prefix inspection rules.
+
+Now consider the case in which your site has decided that, for example,
+``CPATH`` and ``LIBRARY_PATH`` modifications should not be present in
+module files. To abide by this rule, you can create a configuration file
+``~/.spack/modules.yaml`` with the following content:
+
+.. code-block:: yaml
+
+ modules:
+ tcl:
+ all:
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+
+Next you should regenerate all the module files:
+
+.. code-block:: console
+
+ $ spack module refresh --module-type tcl
+ ==> You are about to regenerate tcl module files for:
+
+ -- linux-Ubuntu14-x86_64 / gcc@4.8 ------------------------------
+ dz3xevw binutils@2.27 uq52e2n gmp@6.1.1 avb6azw libsigsegv@2.10 xozf2hx lua@5.3.2 xf7y2p5 lua-luaposix@33.4.0 lylv7lk mpc@1.0.3 u62fit4 ncurses@6.0 bds4ies zlib@1.2.8
+ twd5nqg gcc@6.2.0 cq73t5m isl@0.14 7v7bh7b lmod@6.4.5 sbzejlz lua-luafilesystem@1_6_3 iggewke m4@1.4.17 bldfx3w mpfr@3.1.4 atddxu7 tcl@8.6.5
+
+ ...
+
+ ==> Do you want to proceed ? [y/n]
+ y
+ ==> Regenerating tcl module files
+
+If you take a look now at the module for ``gcc`` you'll see that the unwanted
+paths have disappeared:
+
+.. code-block:: console
+
+ $ module show gcc-6.2.0-gcc-4.8-twd5nqg
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc-6.2.0-gcc-4.8-twd5nqg:
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ whatis("gcc @6.2.0 ")
+ prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
+ prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
+ prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
+ prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
+ prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
+ help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
+ Fortran, and Java.
+ ]])
+
+----------------------------------------------
+Prevent some module files from being generated
+----------------------------------------------
+
+Another common request at many sites is to avoid exposing software that
+is only needed as an intermediate step when building a newer stack.
+Let's try to prevent the generation of
+module files for anything that is compiled with ``gcc@4.8`` (the OS provided compiler).
+
+To do this you should add a ``blacklist`` keyword to the configuration file:
+
+.. code-block:: yaml
+ :emphasize-lines: 3,4
+
+ modules:
+ tcl:
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+
+and regenerate the module files:
+
+.. code-block:: console
+
+ $ spack module refresh --module-type tcl --delete-tree
+ ==> You are about to regenerate tcl module files for:
+
+ -- linux-Ubuntu14-x86_64 / gcc@4.8 ------------------------------
+ dz3xevw binutils@2.27 uq52e2n gmp@6.1.1 avb6azw libsigsegv@2.10 xozf2hx lua@5.3.2 xf7y2p5 lua-luaposix@33.4.0 lylv7lk mpc@1.0.3 u62fit4 ncurses@6.0 bds4ies zlib@1.2.8
+ twd5nqg gcc@6.2.0 cq73t5m isl@0.14 7v7bh7b lmod@6.4.5 sbzejlz lua-luafilesystem@1_6_3 iggewke m4@1.4.17 bldfx3w mpfr@3.1.4 atddxu7 tcl@8.6.5
+
+ -- linux-Ubuntu14-x86_64 / gcc@6.2.0 ----------------------------
+ csoc2mq bzip2@1.0.6 2b54aos libarchive@3.2.1 sfmeynw lzma@4.32.7 wnimqhw netlib-scalapack@2.0.2 s3qbtby openmpi@2.0.1 hkqauaa py-setuptools@25.2.0
+ 6poypqg cmake@3.5.2 eo2siet libpciaccess@0.13.4 jcngz72 lzo@2.09 6bqlxqy netlib-scalapack@2.0.2 hibnfda openssl@1.0.2j qu7rc5p python@2.7.12
+ 2ffacqm curl@7.50.3 g3qpmbi libsigsegv@2.10 lhgqa6s m4@1.4.17 wojunhq netlib-scalapack@2.0.2 rslsgcs pkg-config@0.29.1 td3zfe7 sqlite@3.8.5
+ bxqnjar expat@2.2.0 kiepac6 libtool@2.4.6 5n5xoep mpich@3.2 hpqb3dp netlib-scalapack@2.0.2 4gl5c42 py-nose@1.3.7 uoukuqk util-macros@1.19.0
+ 3cfh3hi gmp@6.1.1 3k4ykbe libxml2@2.9.4 7tb426s ncurses@6.0 djdthlh nettle@3.2 i3rpk4e py-numpy@1.11.1 t5lk6in xz@5.2.2
+ 3ostwel hwloc@1.11.4 cagoem4 lz4@131 mirer2l netlib-lapack@3.6.1 js33umc openblas@0.2.19 e6uljfi py-scipy@0.18.1 asydrba zlib@1.2.8
+
+ ==> Do you want to proceed ? [y/n]
+ y
+
+ $ module avail
+
+ ------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
+ bzip2-1.0.6-gcc-6.2.0-csoc2mq libsigsegv-2.10-gcc-6.2.0-g3qpmbi ncurses-6.0-gcc-6.2.0-7tb426s openmpi-2.0.1-gcc-6.2.0-s3qbtby sqlite-3.8.5-gcc-6.2.0-td3zfe7
+ cmake-3.5.2-gcc-6.2.0-6poypqg libtool-2.4.6-gcc-6.2.0-kiepac6 netlib-lapack-3.6.1-gcc-6.2.0-mirer2l openssl-1.0.2j-gcc-6.2.0-hibnfda util-macros-1.19.0-gcc-6.2.0-uoukuqk
+ curl-7.50.3-gcc-6.2.0-2ffacqm libxml2-2.9.4-gcc-6.2.0-3k4ykbe netlib-scalapack-2.0.2-gcc-6.2.0-6bqlxqy pkg-config-0.29.1-gcc-6.2.0-rslsgcs xz-5.2.2-gcc-6.2.0-t5lk6in
+ expat-2.2.0-gcc-6.2.0-bxqnjar lz4-131-gcc-6.2.0-cagoem4 netlib-scalapack-2.0.2-gcc-6.2.0-hpqb3dp py-nose-1.3.7-gcc-6.2.0-4gl5c42 zlib-1.2.8-gcc-6.2.0-asydrba
+ gmp-6.1.1-gcc-6.2.0-3cfh3hi lzma-4.32.7-gcc-6.2.0-sfmeynw netlib-scalapack-2.0.2-gcc-6.2.0-wnimqhw py-numpy-1.11.1-gcc-6.2.0-i3rpk4e
+ hwloc-1.11.4-gcc-6.2.0-3ostwel lzo-2.09-gcc-6.2.0-jcngz72 netlib-scalapack-2.0.2-gcc-6.2.0-wojunhq py-scipy-0.18.1-gcc-6.2.0-e6uljfi
+ libarchive-3.2.1-gcc-6.2.0-2b54aos m4-1.4.17-gcc-6.2.0-lhgqa6s nettle-3.2-gcc-6.2.0-djdthlh py-setuptools-25.2.0-gcc-6.2.0-hkqauaa
+ libpciaccess-0.13.4-gcc-6.2.0-eo2siet mpich-3.2-gcc-6.2.0-5n5xoep openblas-0.2.19-gcc-6.2.0-js33umc python-2.7.12-gcc-6.2.0-qu7rc5p
+
+This time it is convenient to pass the ``--delete-tree`` option to the
+command that regenerates the module files, instructing it to delete the
+existing tree and create a new one instead of overwriting the files in
+the existing directory.
+
+If you pay careful attention, though, you'll see that we went too far in
+blacklisting modules: the module for ``gcc@6.2.0`` disappeared, as it was
+bootstrapped with ``gcc@4.8``. To specify exceptions to the blacklist
+rules you can use ``whitelist``:
+
+.. code-block:: yaml
+ :emphasize-lines: 3,4
+
+ modules:
+ tcl:
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+
+``whitelist`` rules always have precedence over ``blacklist`` rules. If you regenerate the modules again:
+
+.. code-block:: console
+
+ $ spack module refresh --module-type tcl -y
+
+you'll see that now the module for ``gcc@6.2.0`` has reappeared:
+
+.. code-block:: console
+
+ $ module avail gcc-6.2.0-gcc-4.8-twd5nqg
+
+ ------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
+ gcc-6.2.0-gcc-4.8-twd5nqg
+
+-------------------------
+Change module file naming
+-------------------------
+
+The next step in making module files more user-friendly is to
+improve their naming scheme.
+To reduce the length of the hash or remove it altogether you can
+use the ``hash_length`` keyword in the configuration file:
+
+.. TODO: give reasons to remove hashes if they are not evident enough?
+
+.. code-block:: yaml
+ :emphasize-lines: 3
+
+ modules:
+ tcl:
+ hash_length: 0
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+
+If you try to regenerate the module files now you will get an error:
+
+.. code-block:: console
+
+ $ spack module refresh --module-type tcl --delete-tree -y
+ ==> Error: Name clashes detected in module files:
+
+ file : ~/spack/share/spack/modules/linux-Ubuntu14-x86_64/netlib-scalapack-2.0.2-gcc-6.2.0
+ spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
+ spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
+ spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
+ spec : netlib-scalapack@2.0.2%gcc@6.2.0~fpic+shared arch=linux-Ubuntu14-x86_64
+
+ ==> Error: Operation aborted
+
+.. note::
+  We try to check for errors upfront!
+  Spack checks for errors upfront whenever possible, so don't worry about
+  your module files: since a name clash was detected, nothing has been
+  changed on disk.
+
+The problem here is that, without the hashes, the four different flavors
+of ``netlib-scalapack`` map to the same module file name. We can add
+suffixes to differentiate them:
+
+.. code-block:: yaml
+ :emphasize-lines: 9-11,14-17
+
+ modules:
+ tcl:
+ hash_length: 0
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ suffixes:
+ '^openblas': openblas
+ '^netlib-lapack': netlib
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ netlib-scalapack:
+ suffixes:
+ '^openmpi': openmpi
+ '^mpich': mpich
+
+As you can see, it is possible to specify rules that apply only to a
+restricted set of packages using :ref:`anonymous specs <anonymous_specs>`.
+Regenerating the module files now, we obtain:
+
+.. code-block:: console
+
+ $ spack module refresh --module-type tcl --delete-tree -y
+ ==> Regenerating tcl module files
+ $ module avail
+
+ ------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
+ bzip2-1.0.6-gcc-6.2.0 libpciaccess-0.13.4-gcc-6.2.0 mpich-3.2-gcc-6.2.0 openblas-0.2.19-gcc-6.2.0 python-2.7.12-gcc-6.2.0
+ cmake-3.5.2-gcc-6.2.0 libsigsegv-2.10-gcc-6.2.0 ncurses-6.0-gcc-6.2.0 openmpi-2.0.1-gcc-6.2.0 sqlite-3.8.5-gcc-6.2.0
+ curl-7.50.3-gcc-6.2.0 libtool-2.4.6-gcc-6.2.0 netlib-lapack-3.6.1-gcc-6.2.0 openssl-1.0.2j-gcc-6.2.0 util-macros-1.19.0-gcc-6.2.0
+ expat-2.2.0-gcc-6.2.0 libxml2-2.9.4-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-netlib-mpich pkg-config-0.29.1-gcc-6.2.0 xz-5.2.2-gcc-6.2.0
+ gcc-6.2.0-gcc-4.8 lz4-131-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-netlib-openmpi py-nose-1.3.7-gcc-6.2.0 zlib-1.2.8-gcc-6.2.0
+ gmp-6.1.1-gcc-6.2.0 lzma-4.32.7-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-openblas-mpich py-numpy-1.11.1-gcc-6.2.0-openblas
+ hwloc-1.11.4-gcc-6.2.0 lzo-2.09-gcc-6.2.0 netlib-scalapack-2.0.2-gcc-6.2.0-openblas-openmpi py-scipy-0.18.1-gcc-6.2.0-openblas
+ libarchive-3.2.1-gcc-6.2.0 m4-1.4.17-gcc-6.2.0 nettle-3.2-gcc-6.2.0 py-setuptools-25.2.0-gcc-6.2.0
+
+Finally we can set a ``naming_scheme`` to prevent users from loading
+modules that refer to different flavors of the same library/application:
+
+.. code-block:: yaml
+ :emphasize-lines: 4,10,11
+
+ modules:
+ tcl:
+ hash_length: 0
+ naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ conflict:
+ - '${PACKAGE}'
+ suffixes:
+ '^openblas': openblas
+ '^netlib-lapack': netlib
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ netlib-scalapack:
+ suffixes:
+ '^openmpi': openmpi
+ '^mpich': mpich
+
+The final result should look like:
+
+.. code-block:: console
+
+ $ module avail
+
+ ------------------------------------------------------------------------ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64 ------------------------------------------------------------------------
+ bzip2/1.0.6-gcc-6.2.0 libpciaccess/0.13.4-gcc-6.2.0 mpich/3.2-gcc-6.2.0 openblas/0.2.19-gcc-6.2.0 python/2.7.12-gcc-6.2.0
+ cmake/3.5.2-gcc-6.2.0 libsigsegv/2.10-gcc-6.2.0 ncurses/6.0-gcc-6.2.0 openmpi/2.0.1-gcc-6.2.0 sqlite/3.8.5-gcc-6.2.0
+ curl/7.50.3-gcc-6.2.0 libtool/2.4.6-gcc-6.2.0 netlib-lapack/3.6.1-gcc-6.2.0 openssl/1.0.2j-gcc-6.2.0 util-macros/1.19.0-gcc-6.2.0
+ expat/2.2.0-gcc-6.2.0 libxml2/2.9.4-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-netlib-mpich pkg-config/0.29.1-gcc-6.2.0 xz/5.2.2-gcc-6.2.0
+ gcc/6.2.0-gcc-4.8 lz4/131-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-netlib-openmpi py-nose/1.3.7-gcc-6.2.0 zlib/1.2.8-gcc-6.2.0
+ gmp/6.1.1-gcc-6.2.0 lzma/4.32.7-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-openblas-mpich py-numpy/1.11.1-gcc-6.2.0-openblas
+ hwloc/1.11.4-gcc-6.2.0 lzo/2.09-gcc-6.2.0 netlib-scalapack/2.0.2-gcc-6.2.0-openblas-openmpi (D) py-scipy/0.18.1-gcc-6.2.0-openblas
+ libarchive/3.2.1-gcc-6.2.0 m4/1.4.17-gcc-6.2.0 nettle/3.2-gcc-6.2.0 py-setuptools/25.2.0-gcc-6.2.0
+
+.. note::
+  The ``naming_scheme`` and ``conflict`` directives are TCL-specific and
+  do not apply to the ``dotkit`` or ``lmod`` sections in the configuration
+  file.
+
+------------------------------------
+Add custom environment modifications
+------------------------------------
+
+At many sites it is customary to set an environment variable in a
+package's module file that points to the folder in which the package
+is installed. You can achieve this with Spack by adding an
+``environment`` directive to the configuration file:
+
+.. code-block:: yaml
+ :emphasize-lines: 17-19
+
+ modules:
+ tcl:
+ hash_length: 0
+ naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ conflict:
+ - '${PACKAGE}'
+ suffixes:
+ '^openblas': openblas
+ '^netlib-lapack': netlib
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ environment:
+ set:
+ '${PACKAGE}_ROOT': '${PREFIX}'
+ netlib-scalapack:
+ suffixes:
+ '^openmpi': openmpi
+ '^mpich': mpich
+
+There are many variable tokens available to use in the ``environment``
+and ``naming_scheme`` directives, such as ``${PACKAGE}``,
+``${VERSION}``, etc. (see the :meth:`~spack.spec.Spec.format` API
+documentation for the complete list).
+
+Regenerating the module files should result in something like:
+
+.. code-block:: console
+ :emphasize-lines: 14
+
+ $ spack module refresh -y --module-type tcl
+ ==> Regenerating tcl module files
+
+ $ module show gcc
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc/6.2.0-gcc-4.8:
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ whatis("gcc @6.2.0 ")
+ prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
+ prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
+ prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
+ prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
+ prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
+ setenv("GCC_ROOT","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u")
+ conflict("gcc")
+ help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
+ Fortran, and Java.
+ ]])
+
+As you can see, the ``gcc`` module now has the environment variable ``GCC_ROOT`` set.
+
+Sometimes it's also useful to apply environment modifications selectively
+and target only certain packages. You can, for instance, set the common
+variables ``CC``, ``CXX``, etc. in the ``gcc`` module file and apply other
+custom modifications to the ``openmpi`` modules as follows:
+
+.. code-block:: yaml
+ :emphasize-lines: 20-32
+
+ modules:
+ tcl:
+ hash_length: 0
+ naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ conflict:
+ - '${PACKAGE}'
+ suffixes:
+ '^openblas': openblas
+ '^netlib-lapack': netlib
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ environment:
+ set:
+ '${PACKAGE}_ROOT': '${PREFIX}'
+ gcc:
+ environment:
+ set:
+ CC: gcc
+ CXX: g++
+ FC: gfortran
+ F90: gfortran
+ F77: gfortran
+ openmpi:
+ environment:
+ set:
+ SLURM_MPI_TYPE: pmi2
+ OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
+ netlib-scalapack:
+ suffixes:
+ '^openmpi': openmpi
+ '^mpich': mpich
+
+This time we will be more selective and regenerate only the ``gcc`` and
+``openmpi`` module files:
+
+.. code-block:: console
+
+ $ spack module refresh -y --module-type tcl gcc
+ ==> Regenerating tcl module files
+
+ $ spack module refresh -y --module-type tcl openmpi
+ ==> Regenerating tcl module files
+
+ $ module show gcc
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64/gcc/6.2.0-gcc-4.8:
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ whatis("gcc @6.2.0 ")
+ prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/bin")
+ prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/")
+ prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/share/man")
+ prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64/pkgconfig")
+ prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u/lib64")
+ setenv("GCC_ROOT","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-4.8/gcc-6.2.0-twd5nqg33hrrssqclcfi5k42eccwxz5u")
+ setenv("CC","gcc")
+ setenv("CXX","g++")
+ setenv("F90","gfortran")
+ setenv("FC","gfortran")
+ setenv("F77","gfortran")
+ conflict("gcc")
+ help([[The GNU Compiler Collection includes front ends for C, C++, Objective-C,
+ Fortran, and Java.
+ ]])
+
+ $ module show openmpi
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ ~/spack/share/spack/modules/linux-Ubuntu14-x86_64/openmpi/2.0.1-gcc-6.2.0:
+ ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+ whatis("openmpi @2.0.1 ")
+ prepend_path("PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/bin")
+ prepend_path("CMAKE_PREFIX_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/")
+ prepend_path("LD_LIBRARY_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/lib")
+ prepend_path("PKG_CONFIG_PATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/lib/pkgconfig")
+ prepend_path("MANPATH","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w/share/man")
+ setenv("SLURM_MPI_TYPE","pmi2")
+ setenv("OMPI_MCA_BTL_OPENIB_WARN_DEFAULT_GID_PREFIX","0")
+ setenv("OPENMPI_ROOT","~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/openmpi-2.0.1-s3qbtbyh3y5y4gkchmhcuak7th44l53w")
+ conflict("openmpi")
+ help([[The Open MPI Project is an open source Message Passing Interface
+ implementation that is developed and maintained by a consortium of
+ academic, research, and industry partners. Open MPI is therefore able to
+ combine the expertise, technologies, and resources from all across the
+ High Performance Computing community in order to build the best MPI
+ library available. Open MPI offers advantages for system and software
+ vendors, application developers and computer science researchers.
+ ]])
+
+
+---------------------
+Autoload dependencies
+---------------------
+
+Spack can also generate module files that contain code to load the
+dependencies automatically. You can, for instance, generate python
+modules that load their dependencies by adding the ``autoload``
+directive and assigning it the value ``direct``:
+
+.. code-block:: yaml
+ :emphasize-lines: 37,38
+
+ modules:
+ tcl:
+ hash_length: 0
+ naming_scheme: '${PACKAGE}/${VERSION}-${COMPILERNAME}-${COMPILERVER}'
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ conflict:
+ - '${PACKAGE}'
+ suffixes:
+ '^openblas': openblas
+ '^netlib-lapack': netlib
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ environment:
+ set:
+ '${PACKAGE}_ROOT': '${PREFIX}'
+ gcc:
+ environment:
+ set:
+ CC: gcc
+ CXX: g++
+ FC: gfortran
+ F90: gfortran
+ F77: gfortran
+ openmpi:
+ environment:
+ set:
+ SLURM_MPI_TYPE: pmi2
+ OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
+ netlib-scalapack:
+ suffixes:
+ '^openmpi': openmpi
+ '^mpich': mpich
+ ^python:
+ autoload: 'direct'
+
+and regenerating the module files for every package that depends on ``python``:
+
+.. code-block:: console
+
+ $ spack module refresh -y --module-type tcl ^python
+ ==> Regenerating tcl module files
+
+Now the ``py-scipy`` module will be:
+
+.. code-block:: tcl
+
+ #%Module1.0
+ ## Module file created by spack (https://github.com/LLNL/spack) on 2016-11-02 20:53:21.283547
+ ##
+ ## py-scipy@0.18.1%gcc@6.2.0 arch=linux-Ubuntu14-x86_64-e6uljfi
+ ##
+ module-whatis "py-scipy @0.18.1"
+
+ proc ModulesHelp { } {
+ puts stderr "SciPy (pronounced "Sigh Pie") is a Scientific Library for Python. It"
+ puts stderr "provides many user-friendly and efficient numerical routines such as"
+ puts stderr "routines for numerical integration and optimization."
+ }
+
+ if ![ is-loaded python/2.7.12-gcc-6.2.0 ] {
+ puts stderr "Autoloading python/2.7.12-gcc-6.2.0"
+ module load python/2.7.12-gcc-6.2.0
+ }
+
+ if ![ is-loaded openblas/0.2.19-gcc-6.2.0 ] {
+ puts stderr "Autoloading openblas/0.2.19-gcc-6.2.0"
+ module load openblas/0.2.19-gcc-6.2.0
+ }
+
+ if ![ is-loaded py-numpy/1.11.1-gcc-6.2.0-openblas ] {
+ puts stderr "Autoloading py-numpy/1.11.1-gcc-6.2.0-openblas"
+ module load py-numpy/1.11.1-gcc-6.2.0-openblas
+ }
+
+ prepend-path CMAKE_PREFIX_PATH "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh/"
+ prepend-path LD_LIBRARY_PATH "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh/lib"
+ prepend-path PYTHONPATH "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh/lib/python2.7/site-packages"
+ setenv PY_SCIPY_ROOT "~/spack/opt/spack/linux-Ubuntu14-x86_64/gcc-6.2.0/py-scipy-0.18.1-e6uljfiffgym4xvj6wveevqxfqnfb3gh"
+ conflict py-scipy
+
+and will contain code to autoload all the dependencies:
+
+.. code-block:: console
+
+ $ module load py-scipy
+ Autoloading python/2.7.12-gcc-6.2.0
+ Autoloading openblas/0.2.19-gcc-6.2.0
+ Autoloading py-numpy/1.11.1-gcc-6.2.0-openblas
+
+-----------------------------
+Lua hierarchical module files
+-----------------------------
+
+In the final part of this tutorial you will modify ``modules.yaml`` to generate
+Lua hierarchical module files. You will see that most of the directives used before
+are also valid in the ``lmod`` context.
+
+^^^^^^^^^^^^^^^^^
+Core/Compiler/MPI
+^^^^^^^^^^^^^^^^^
+
+.. warning::
+   Only LMod supports Lua hierarchical module files.
+   For this part of the tutorial you need to be using LMod to
+   manage your environment.
+
+The most common hierarchy is the so-called ``Core/Compiler/MPI``. To get an idea
+of how a hierarchy is organized you may refer to the
+`Lmod guide <https://www.tacc.utexas.edu/research-development/tacc-projects/lmod/user-guide/module-hierarchy>`_.
+Since ``lmod`` is not enabled by default, you need to add it to the list of
+enabled module file generators. The other things you need to do are:
+
+- change the ``tcl`` tag to ``lmod``
+- remove ``tcl`` specific directives (``naming_scheme`` and ``conflict``)
+- set which compilers are considered ``core``
+- remove the ``mpi`` related suffixes (as they will be substituted by hierarchies)
+
+After modifications the configuration file will be:
+
+.. code-block:: yaml
+ :emphasize-lines: 2-6
+
+ modules:
+ enable::
+ - lmod
+ lmod:
+ core_compilers:
+ - 'gcc@4.8'
+ hash_length: 0
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ all:
+ suffixes:
+ '^openblas': openblas
+ '^netlib-lapack': netlib
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ environment:
+ set:
+ '${PACKAGE}_ROOT': '${PREFIX}'
+ gcc:
+ environment:
+ set:
+ CC: gcc
+ CXX: g++
+ FC: gfortran
+ F90: gfortran
+ F77: gfortran
+ openmpi:
+ environment:
+ set:
+ SLURM_MPI_TYPE: pmi2
+ OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
+
+
+.. note::
+ The double colon
+   The double colon after ``enable`` is intentional: it overrides the
+   default list of enabled generators so that only ``lmod`` will be
+   active (see :ref:`the reference manual <config-overrides>` for a
+   more detailed explanation of config scopes).
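+
+   As a minimal sketch of the difference (restating the explanation above),
+   compare the two spellings:
+
+   .. code-block:: yaml
+
+      modules:
+        # 'enable:' (single colon) would be merged with the default list,
+        # so 'tcl' module files would still be generated alongside 'lmod':
+        #
+        #   enable:
+        #     - lmod
+        #
+        # 'enable::' (double colon) overrides the default list entirely,
+        # so only 'lmod' module files are generated:
+        enable::
+          - lmod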
+
+The ``core_compilers`` directive accepts a list of compilers: everything built
+using these compilers will create a module in the ``Core`` part of the hierarchy. It is
+common practice to put the OS-provided compilers in the list and to build only common
+utilities and other compilers in ``Core``.
+
+If you regenerate the module files
+
+.. code-block:: console
+
+ $ spack module refresh --module-type lmod --delete-tree -y
+
+and update ``MODULEPATH`` to point to the ``Core`` folder, and
+list the available modules, you'll see:
+
+.. code-block:: console
+
+ $ module unuse ~/spack/share/spack/modules/linux-Ubuntu14-x86_64
+ $ module use ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core
+ $ module avail
+
+ ----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
+ gcc/6.2.0
+
+The only module visible now is ``gcc``. Loading it makes visible the
+``Compiler`` part of the software stack, i.e. everything that was built with ``gcc/6.2.0``:
+
+.. code-block:: console
+
+ $ module load gcc
+ $ module avail
+
+ -------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/gcc/6.2.0 ---------------------------------------------------------------------
+ binutils/2.27 curl/7.50.3 hwloc/1.11.4 libtool/2.4.6 lzo/2.09 netlib-lapack/3.6.1 openssl/1.0.2j py-scipy/0.18.1-openblas util-macros/1.19.0
+ bison/3.0.4 expat/2.2.0 libarchive/3.2.1 libxml2/2.9.4 m4/1.4.17 nettle/3.2 pkg-config/0.29.1 py-setuptools/25.2.0 xz/5.2.2
+ bzip2/1.0.6 flex/2.6.0 libpciaccess/0.13.4 lz4/131 mpich/3.2 openblas/0.2.19 py-nose/1.3.7 python/2.7.12 zlib/1.2.8
+ cmake/3.6.1 gmp/6.1.1 libsigsegv/2.10 lzma/4.32.7 ncurses/6.0 openmpi/2.0.1 py-numpy/1.11.1-openblas sqlite/3.8.5
+
+ ----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
+ gcc/6.2.0 (L)
+
+The same holds true for the ``MPI`` part of the stack, which you can enable by loading
+either ``mpich`` or ``openmpi``. The nice features of LMod become evident
+once you try switching among different stacks:
+
+.. code-block:: console
+
+ $ module load mpich
+ $ module avail
+
+ ----------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/mpich/3.2-5n5xoep/gcc/6.2.0 ------------------------------------------------------------
+ netlib-scalapack/2.0.2-netlib netlib-scalapack/2.0.2-openblas (D)
+
+ -------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/gcc/6.2.0 ---------------------------------------------------------------------
+ binutils/2.27 curl/7.50.3 hwloc/1.11.4 libtool/2.4.6 lzo/2.09 netlib-lapack/3.6.1 openssl/1.0.2j py-scipy/0.18.1-openblas util-macros/1.19.0
+ bison/3.0.4 expat/2.2.0 libarchive/3.2.1 libxml2/2.9.4 m4/1.4.17 nettle/3.2 pkg-config/0.29.1 py-setuptools/25.2.0 xz/5.2.2
+ bzip2/1.0.6 flex/2.6.0 libpciaccess/0.13.4 lz4/131 mpich/3.2 (L) openblas/0.2.19 py-nose/1.3.7 python/2.7.12 zlib/1.2.8
+ cmake/3.6.1 gmp/6.1.1 libsigsegv/2.10 lzma/4.32.7 ncurses/6.0 openmpi/2.0.1 py-numpy/1.11.1-openblas sqlite/3.8.5
+
+ ----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
+ gcc/6.2.0 (L)
+
+ $ module load openblas netlib-scalapack/2.0.2-openblas
+ $ module list
+
+ Currently Loaded Modules:
+ 1) gcc/6.2.0 2) mpich/3.2 3) openblas/0.2.19 4) netlib-scalapack/2.0.2-openblas
+
+ $ module load openmpi
+
+ Lmod is automatically replacing "mpich/3.2" with "openmpi/2.0.1"
+
+
+ Due to MODULEPATH changes the following have been reloaded:
+ 1) netlib-scalapack/2.0.2-openblas
+
+This layout is already a great improvement over the usual non-hierarchical one,
+but it still has an asymmetry: ``LAPACK`` providers play the same semantic role as
+``MPI`` providers, yet they are not part of the hierarchy. We'll see a possible
+solution next.
+
+.. Activate lmod and turn the previous modifications into lmod:
+ Add core compilers
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Extend the hierarchy to other virtual providers
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. warning::
+ This is an experimental feature
+   Hierarchies deeper than ``Core``/``Compiler``/``MPI`` are not yet fully
+   supported by ``module spider``
+   (see `this issue <https://github.com/TACC/Lmod/issues/114>`_). Furthermore,
+   their use with hierarchies more complex than ``Core``/``Compiler``/``MPI``/``LAPACK``
+   has not been thoroughly tested in production environments.
+
+Spack permits you to generate Lua hierarchical module files where users
+can add an arbitrary list of virtual providers to the triplet
+``Core``/``Compiler``/``MPI``. A configuration file like:
+
+.. code-block:: yaml
+ :emphasize-lines: 7,8
+
+ modules:
+ enable::
+ - lmod
+ lmod:
+ core_compilers:
+ - 'gcc@4.8'
+ hierarchical_scheme:
+ - lapack
+ hash_length: 0
+ whitelist:
+ - gcc
+ blacklist:
+ - '%gcc@4.8'
+ - readline
+ all:
+ filter:
+ environment_blacklist: ['CPATH', 'LIBRARY_PATH']
+ environment:
+ set:
+ '${PACKAGE}_ROOT': '${PREFIX}'
+ gcc:
+ environment:
+ set:
+ CC: gcc
+ CXX: g++
+ FC: gfortran
+ F90: gfortran
+ F77: gfortran
+ openmpi:
+ environment:
+ set:
+ SLURM_MPI_TYPE: pmi2
+ OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
+
+will add ``lapack`` providers to the mix. After the usual regeneration of module files:
+
+.. code-block:: console
+
+ $ module purge
+ $ spack module refresh --module-type lmod --delete-tree -y
+ ==> Regenerating lmod module files
+
+you will have something like:
+
+.. code-block:: console
+
+ $ module load gcc
+ $ module load openblas
+ $ module load openmpi
+ $ module avail
+
+ --------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/openblas/0.2.19-js33umc/openmpi/2.0.1-s3qbtby/gcc/6.2.0 ----------------------------------------------
+ netlib-scalapack/2.0.2
+
+ -------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/openblas/0.2.19-js33umc/gcc/6.2.0 ---------------------------------------------------------
+ py-numpy/1.11.1 py-scipy/0.18.1
+
+ -------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/gcc/6.2.0 ---------------------------------------------------------------------
+ binutils/2.27 curl/7.50.3 hwloc/1.11.4 libtool/2.4.6 lzo/2.09 netlib-lapack/3.6.1 openssl/1.0.2j python/2.7.12 zlib/1.2.8
+ bison/3.0.4 expat/2.2.0 libarchive/3.2.1 libxml2/2.9.4 m4/1.4.17 nettle/3.2 pkg-config/0.29.1 sqlite/3.8.5
+ bzip2/1.0.6 flex/2.6.0 libpciaccess/0.13.4 lz4/131 mpich/3.2 openblas/0.2.19 (L) py-nose/1.3.7 util-macros/1.19.0
+ cmake/3.6.1 gmp/6.1.1 libsigsegv/2.10 lzma/4.32.7 ncurses/6.0 openmpi/2.0.1 (L) py-setuptools/25.2.0 xz/5.2.2
+
+ ----------------------------------------------------------------------- ~/spack/share/spack/lmod/linux-Ubuntu14-x86_64/Core -----------------------------------------------------------------------
+ gcc/6.2.0 (L)
+
+Now both the ``MPI`` and the ``LAPACK`` providers are handled by LMod as hierarchies:
+
+.. code-block:: console
+
+ $ module load py-numpy netlib-scalapack
+ $ module load mpich
+
+ Lmod is automatically replacing "openmpi/2.0.1" with "mpich/3.2"
+
+
+ Due to MODULEPATH changes the following have been reloaded:
+ 1) netlib-scalapack/2.0.2
+
+ $ module load netlib-lapack
+
+ Lmod is automatically replacing "openblas/0.2.19" with "netlib-lapack/3.6.1"
+
+
+ Inactive Modules:
+ 1) py-numpy
+
+ Due to MODULEPATH changes the following have been reloaded:
+ 1) netlib-scalapack/2.0.2
+
+This makes the use of suffixes to differentiate them unnecessary.
+Note that because we compiled ``py-numpy`` only with ``openblas``, its module
+is made inactive when we switch the ``LAPACK`` provider. The user
+environment will now be consistent by design!
diff --git a/lib/spack/docs/tutorial_sc16_packaging.rst b/lib/spack/docs/tutorial_sc16_packaging.rst
new file mode 100644
index 0000000000..e250ab835e
--- /dev/null
+++ b/lib/spack/docs/tutorial_sc16_packaging.rst
@@ -0,0 +1,462 @@
+.. _packaging-tutorial:
+
+=========================
+Package Creation Tutorial
+=========================
+
+This tutorial will walk you through the steps behind building a simple
+package installation script. We'll focus on building an mpileaks package,
+which is an MPI debugging tool. By creating a package file we're
+essentially giving Spack a recipe for how to build a particular piece of
+software. We're describing some of the software's dependencies, where to
+find the package, what commands and options are used to build the package
+from source, and more. Once we've specified a package's recipe, we can
+ask Spack to build that package in many different ways.
+
+This tutorial assumes you have a basic familiarity with some of the Spack
+commands, and that you have a working version of Spack installed. If
+not, we suggest looking at Spack's *Getting Started* guide. This
+tutorial also assumes you have at least a beginner's-level familiarity
+with Python.
+
+Also note that this document is a tutorial. It can help you get started
+with packaging, but is not intended to be complete. See Spack's
+:ref:`packaging-guide` for more complete documentation on this topic.
+
+---------------
+Getting Started
+---------------
+
+A few things before we get started:
+
+- We'll refer to the Spack installation location via the environment
+ variable ``SPACK_ROOT``. You should point ``SPACK_ROOT`` at wherever
+ you have Spack installed.
+- Add ``$SPACK_ROOT/bin`` to your ``PATH`` before you start (see the
+  example after this list).
+- Make sure your ``EDITOR`` environment variable is set to some text
+ editor you like.
+- We'll be writing Python code as part of this tutorial. You can find
+ successive versions of the Python code in
+ ``$SPACK_ROOT/lib/spack/docs/tutorial/examples``.
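+
+For example, the first three of these steps might look like the following
+(the paths and editor below are placeholders; adjust them to your setup):
+
+.. code-block:: console
+
+   $ export SPACK_ROOT=~/spack          # wherever you cloned Spack
+   $ export PATH=$SPACK_ROOT/bin:$PATH
+   $ export EDITOR=vim                  # or any editor you prefer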
+
+-------------------------
+Creating the Package File
+-------------------------
+
+Spack comes with a handy command to create a new package: ``spack create``.
+
+This command is given the location of a package's source code, downloads
+the code, and sets up some basic packaging infrastructure for you. The
+mpileaks source code can be found on GitHub, and here's what happens when
+we run ``spack create`` on it:
+
+.. code-block:: console
+
+ $ spack create -f https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
+ ==> This looks like a URL for mpileaks version 1.0
+ ==> Creating template for package mpileaks
+ ==> Downloading...
+ ==> Fetching https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
+ ###################################################################################### 100.0%
+
+And Spack should spawn a text editor with this file:
+
+.. literalinclude:: tutorial/examples/0.package.py
+ :language: python
+
+Spack has created this file in
+``$SPACK_ROOT/var/spack/repos/builtin/packages/mpileaks/package.py``. Take a
+moment to look over the file. There are a few placeholders that Spack has
+created, which we'll fill in as part of this tutorial:
+
+- We'll document some information about this package in the comments.
+- We'll fill in the dependency list for this package.
+- We'll fill in some of the configuration arguments needed to build this
+ package.
+
+For the moment, exit your editor and let's see what happens when we try
+to build this package:
+
+.. code-block:: console
+
+ $ spack install mpileaks
+ ==> Installing mpileaks
+ ==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
+ ==> Staging archive: /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-hufwhwpq5benv3sslie6ryflk5s6nm35/mpileaks-1.0.tar.gz
+ ==> Created stage in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-hufwhwpq5benv3sslie6ryflk5s6nm35
+ ==> Ran patch() for mpileaks
+ ==> Building mpileaks [AutotoolsPackage]
+ ==> Executing phase : 'autoreconf'
+ ==> Executing phase : 'configure'
+ ==> Error: ProcessError: Command exited with status 1:
+ './configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-hufwhwpq5benv3sslie6ryflk5s6nm35'
+ /usr/workspace/wsa/legendre/spack/lib/spack/spack/build_systems/autotools.py:150, in configure:
+ 145 def configure(self, spec, prefix):
+ 146 """Runs configure with the arguments specified in `configure_args`
+ 147 and an appropriately set prefix
+ 148 """
+ 149 options = ['--prefix={0}'.format(prefix)] + self.configure_args()
+ >> 150 inspect.getmodule(self).configure(*options)
+
+ See build log for details:
+ /tmp/legendre/spack-stage/spack-stage-8HVzqu/mpileaks-1.0/spack-build.out
+
+This obviously didn't work; we need to fill in the package-specific
+information. Specifically, Spack didn't try to build any of mpileaks'
+dependencies, nor did it use the proper configure arguments. Let's start
+fixing things.
+
+---------------------
+Package Documentation
+---------------------
+
+We can bring the ``package.py`` file back into our ``EDITOR`` with the
+``spack edit`` command:
+
+.. code-block:: console
+
+ $ spack edit mpileaks
+
+Let's remove some of the ``FIXME`` comments, and add links to the mpileaks
+homepage and document what mpileaks does. I'm also going to cut out the
+Copyright clause at this point to keep this tutorial document shorter,
+but you shouldn't do that normally. The results of these changes can be
+found in ``$SPACK_ROOT/lib/spack/docs/tutorial/examples/1.package.py``
+and are below. Make these changes to your ``package.py``:
+
+.. literalinclude:: tutorial/examples/1.package.py
+ :lines: 25-
+ :language: python
+
+We've filled in the comment that describes what this package does and
+added a link to the web site. That won't help us build yet, but it will
+allow Spack to provide some documentation on this package to other users:
+
+.. code-block:: console
+
+ $ spack info mpileaks
+ AutotoolsPackage: mpileaks
+ Homepage: https://github.com/hpc/mpileaks
+
+ Safe versions:
+ 1.0 https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
+
+ Variants:
+ None
+
+ Installation Phases:
+ autoreconf configure build install
+
+ Build Dependencies:
+ None
+
+ Link Dependencies:
+ None
+
+ Run Dependencies:
+ None
+
+ Virtual Packages:
+ None
+
+ Description:
+ Tool to detect and report MPI objects like MPI_Requests and
+ MPI_Datatypes
+
+As we fill in more information about this package the ``spack info`` command
+will become more informative. Now let's start making this package build.
+
+------------
+Dependencies
+------------
+
+The mpileaks package depends on three other packages: ``MPI``,
+``adept-utils``, and ``callpath``. Let's add those via the
+``depends_on`` command in our ``package.py`` (this version is in
+``$SPACK_ROOT/lib/spack/docs/tutorial/examples/2.package.py``):
+
+.. literalinclude:: tutorial/examples/2.package.py
+ :lines: 25-
+ :language: python
+
+Now when we go to build mpileaks, Spack will fetch and build these
+dependencies before building mpileaks. Note that the mpi dependency is a
+different kind of beast than the adept-utils and callpath dependencies;
+there is no mpi package available in Spack. Instead, mpi is a virtual
+dependency. Spack may satisfy that dependency by installing packages
+such as ``openmpi`` or ``mvapich``. See the :ref:`packaging-guide` for more
+information on virtual dependencies.
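+
+For reference, the new ``depends_on`` directives in ``2.package.py`` look
+roughly like the following sketch (the other directives from ``1.package.py``
+are unchanged):
+
+.. code-block:: python
+
+   class Mpileaks(AutotoolsPackage):
+       # homepage, url and version() directives stay as in 1.package.py ...
+
+       depends_on('mpi')          # virtual: satisfied by openmpi, mvapich, ...
+       depends_on('adept-utils')  # concrete dependency
+       depends_on('callpath')     # concrete dependency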
+
+Now when we try to install this package a lot more happens:
+
+.. code-block:: console
+
+ $ spack install mpileaks
+ ==> Installing mpileaks
+ ==> openmpi is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz
+ ==> callpath is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube
+ ==> adept-utils is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz
+ ==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
+ ==> Already staged mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk
+ ==> Already patched mpileaks
+ ==> Building mpileaks [AutotoolsPackage]
+ ==> Executing phase : 'autoreconf'
+ ==> Executing phase : 'configure'
+ ==> Error: ProcessError: Command exited with status 1:
+ './configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk'
+ /usr/workspace/wsa/legendre/spack/lib/spack/spack/build_systems/autotools.py:150, in configure:
+ 145 def configure(self, spec, prefix):
+ 146 """Runs configure with the arguments specified in `configure_args`
+ 147 and an appropriately set prefix
+ 148 """
+ 149 options = ['--prefix={0}'.format(prefix)] + self.configure_args()
+ >> 150 inspect.getmodule(self).configure(*options)
+
+ See build log for details:
+ /tmp/legendre/spack-stage/spack-stage-7V5yyk/mpileaks-1.0/spack-build.out
+
+Note that this command may take a while to run and produce more output if
+you don't have an MPI already installed or configured in Spack.
+
+Now Spack has identified all of our dependencies and made sure they have been
+built. It found the ``openmpi`` package to satisfy our ``mpi``
+dependency, and the ``callpath`` and ``adept-utils`` packages to satisfy our
+concrete dependencies.
+
+------------------------
+Debugging Package Builds
+------------------------
+
+Our ``mpileaks`` package is still not building. It may be obvious to
+many of you that we're still missing the configure options. But let's
+pretend we're not all intelligent developers and use this opportunity to
+spend some time debugging. We have a few options that can tell us
+what's going wrong:
+
+As per the error message, Spack has given us a ``spack-build.out`` debug log:
+
+.. code-block:: console
+
+ ==> './configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk'
+ checking metadata... no
+ checking installation directory variables... yes
+ checking for a BSD-compatible install... /usr/bin/install -c
+ checking whether build environment is sane... yes
+ checking for a thread-safe mkdir -p... /usr/bin/mkdir -p
+ checking for gawk... gawk
+ checking whether make sets $(MAKE)... yes
+ checking for gcc... /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc
+ checking for C compiler default output file name... a.out
+ checking whether the C compiler works... yes
+ checking whether we are cross compiling... no
+ checking for suffix of executables...
+ checking for suffix of object files... o
+ checking whether we are using the GNU C compiler... yes
+ checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc accepts -g... yes
+ checking for /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc option to accept ISO C89... none needed
+ checking for style of include used by make... GNU
+ checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc... gcc3
+ checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
+ checking whether we are using the GNU C++ compiler... yes
+ checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++ accepts -g... yes
+ checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++... gcc3
+ checking for /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc... /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc
+ Checking whether /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc responds to '-showme:compile'... yes
+ configure: error: unable to locate ``adept-utils`` installation
+
+This gives us the output from the build, and it's fairly obvious that
+mpileaks isn't finding its ``adept-utils`` package. Spack has
+automatically added the include and library directories of
+``adept-utils`` to the compiler's search path, but some packages, like
+mpileaks, are picky and still want things spelled out on
+their command line. But let's continue to pretend we're not brilliant
+developers, and explore some other debugging paths:
+
+We can also enter the build area and try to manually run the build:
+
+.. code-block:: console
+
+ $ spack env mpileaks bash
+ $ spack cd mpileaks
+
+The ``spack env`` command spawned a new shell that contains the same
+environment that Spack used to build the mpileaks package (you can
+substitute bash for your favorite shell). The ``spack cd`` command
+changed our working directory to the last attempted build for mpileaks.
+From here we can manually re-run the build:
+
+.. code-block:: console
+
+ $ ./configure
+ checking metadata... no
+ checking installation directory variables... yes
+ checking for a BSD-compatible install... /usr/bin/install -c
+ checking whether build environment is sane... yes
+ checking for a thread-safe mkdir -p... /usr/bin/mkdir -p
+ checking for gawk... gawk
+ checking whether make sets $(MAKE)... yes
+ checking for gcc... /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc
+ checking for C compiler default output file name... a.out
+ checking whether the C compiler works... yes
+ checking whether we are cross compiling... no
+ checking for suffix of executables...
+ checking for suffix of object files... o
+ checking whether we are using the GNU C compiler... yes
+ checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc accepts -g... yes
+ checking for /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc option to accept ISO C89... none needed
+ checking for style of include used by make... GNU
+ checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc... gcc3
+ checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/gcc and cc understand -c and -o together... yes
+ checking whether we are using the GNU C++ compiler... yes
+ checking whether /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++ accepts -g... yes
+ checking dependency style of /usr/workspace/wsa/legendre/spack/lib/spack/env/gcc/g++... gcc3
+ checking for /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc... /usr/workspace/wsa /legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc
+ Checking whether /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz/bin/mpicc responds to '-showme:compile'... yes
+ configure: error: unable to locate adept-utils installation
+
+We're seeing the same error, but now we're in a shell where we can run
+the command ourselves and debug as needed. We could, for example, run
+``./configure --help`` to see what options we can use to specify
+dependencies.
+
+We can use the ``exit`` command to leave the shell spawned by ``spack
+env``.
+
+------------------------------
+Specifying Configure Arguments
+------------------------------
+
+Let's add the configure arguments to mpileaks' ``package.py``. This
+version can be found in
+``$SPACK_ROOT/lib/spack/docs/tutorial/examples/3.package.py``:
+
+.. literalinclude:: tutorial/examples/3.package.py
+ :lines: 25-
+ :language: python
+
+This is all we need for a working mpileaks package! If we install now we'll see:
+
+.. code-block:: console
+
+ $ spack install mpileaks
+ ==> Installing mpileaks
+ ==> openmpi is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz
+ ==> callpath is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube
+ ==> adept-utils is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz
+ ==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
+ ==> Already staged mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk
+ ==> Already patched mpileaks
+ ==> Building mpileaks [AutotoolsPackage]
+ ==> Executing phase : 'autoreconf'
+ ==> Executing phase : 'configure'
+ ==> Executing phase : 'build'
+ ==> Executing phase : 'install'
+ ==> Successfully installed mpileaks
+ Fetch: 0.00s. Build: 14.08s. Total: 14.08s.
+ [+] /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-eum4hmnlt6ovalwjnciaygfb3beja4gk
+
+We took a few shortcuts for this package that are worth highlighting.
+Spack automatically detected that mpileaks was an Autotools-based package
+when we ran ``spack create``. If this had been a CMake-based package we
+would have been filling in a ``cmake_args`` function instead of
+``configure_args``. If Spack hadn't been able to detect the build
+system, we'd be filling in a generic install method that manually
+calls build commands, such as the one found in the ``zlib`` package:
+
+.. code-block:: python
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+ make()
+ make('install')
+
+--------
+Variants
+--------
+
+We have a successful mpileaks build, but let's take some time to improve
+it. ``mpileaks`` has a build-time option to truncate parts of the stack
+that it walks. Let's add a variant to allow users to set this when they
+build in Spack.
+
+To do this, we'll add a variant to our package, as per the following (see
+``$SPACK_ROOT/lib/spack/docs/tutorial/examples/4.package.py``):
+
+.. literalinclude:: tutorial/examples/4.package.py
+ :lines: 25-
+ :language: python
+
+We've added the variant ``stackstart``, and given it a default value of
+``0``. If we install now we can see the stackstart variant added to the
+configure line (output truncated for length):
+
+.. code-block:: console
+
+ $ spack install --verbose mpileaks stackstart=4
+ ==> Installing mpileaks
+ ==> openmpi is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/openmpi-2.0.1-5ee5j34c2y4kb5c3joygrgahidqnwhnz
+ ==> callpath is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube
+ ==> adept-utils is already installed in /usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz
+ ==> Using cached archive: /usr/workspace/wsa/legendre/spack/var/spack/cache/mpileaks/mpileaks-1.0.tar.gz
+ ==> Staging archive: /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-otqo2opkhan5ksujt6tpmdftydrieig7/mpileaks-1.0.tar.gz
+ ==> Created stage in /usr/workspace/wsa/legendre/spack/var/spack/stage/mpileaks-1.0-otqo2opkhan5ksujt6tpmdftydrieig7
+ ==> Ran patch() for mpileaks
+ ==> Building mpileaks [AutotoolsPackage]
+ ==> Executing phase : 'autoreconf'
+ ==> Executing phase : 'configure'
+ ==> './configure' '--prefix=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/mpileaks-1.0-otqo2opkhan5ksujt6tpmdftydrieig7' '--with-adept-utils=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/adept-utils-1.0.1-7p7ezxwtajdglj6cmojy2vybjct4j4jz' '--with-callpath=/usr/workspace/wsa/legendre/spack/opt/spack/linux-rhel7-x86_64/gcc-4.9.3/callpath-1.0.2-zm4pf3gasgxeibyu2y262suktvaazube' '--with-stack-start-c=4' '--with-stack-start-fortran=4'
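+
+For reference, the changes in ``4.package.py`` boil down to a variant
+declaration and a small addition to ``configure_args``. The following is a
+rough sketch of that shape (see the file itself for the exact code; directive
+signatures have changed across Spack versions):
+
+.. code-block:: python
+
+   class Mpileaks(AutotoolsPackage):
+       # homepage, url, version() and depends_on() directives as before ...
+
+       variant('stackstart', default=0,
+               description='Specifies the number of stack frames to truncate')
+
+       def configure_args(self):
+           args = ['--with-adept-utils={0}'.format(self.spec['adept-utils'].prefix),
+                   '--with-callpath={0}'.format(self.spec['callpath'].prefix)]
+           stackstart = int(self.spec.variants['stackstart'].value)
+           if stackstart:
+               # Pass the truncation depth to both the C and Fortran options.
+               args.extend(['--with-stack-start-c={0}'.format(stackstart),
+                            '--with-stack-start-fortran={0}'.format(stackstart)])
+           return args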
+
+---------------
+The Spec Object
+---------------
+
+This tutorial has glossed over a few important features, which weren't
+too relevant for mpileaks but may be useful for other packages. There
+were several places where we referenced the ``self.spec`` object. This is a
+powerful class for querying information about what we're building. For
+example, you could use the spec to query information about how a
+package's dependencies were built, or what compiler was being used, or
+what version of a package is being installed. Full documentation can be
+found in the :ref:`packaging-guide`, but here are some quick snippets with
+common queries:
+
+- Am I building ``mpileaks`` version ``1.1`` or greater?
+
+.. code-block:: python
+
+ if self.spec.satisfies('@1.1:'):
+ # Do things needed for 1.1+
+
+- Is ``openmpi`` the MPI I'm building with?
+
+.. code-block:: python
+
+ if self.spec['mpi'].name == 'openmpi':
+ # Do openmpi things
+
+- Am I building with a ``gcc`` version less than ``5.0.0``?
+
+.. code-block:: python
+
+ if self.spec.satisfies('%gcc@:5.0.0'):
+ # Add arguments specific to gcc's earlier than 5.0.0
+
+- Am I building with the ``debug`` variant?
+
+.. code-block:: python
+
+ if self.spec.satisfies('+debug'):
+ # Add -g option to configure flags
+
+- Is my ``dyninst`` dependency greater than version ``8.0``?
+
+.. code-block:: python
+
+ if self.spec['dyninst'].satisfies('@8.0:'):
+ # Use newest dyninst options
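+
+Putting a few of these queries together, a hypothetical ``configure_args``
+method could look like the sketch below (the option names are made up for
+illustration; only the ``self.spec`` calls are real Spack API):
+
+.. code-block:: python
+
+   def configure_args(self):
+       args = []
+
+       # Point the build at whichever MPI provider was chosen for this spec.
+       args.append('--with-mpi={0}'.format(self.spec['mpi'].prefix))
+
+       # Only pass an option that exists in newer releases of this package.
+       if self.spec.satisfies('@1.1:'):
+           args.append('--enable-new-feature')
+
+       # Build with debugging support when the debug variant is requested.
+       if self.spec.satisfies('+debug'):
+           args.append('--enable-debug')
+
+       return args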
+
+More examples can be found in the more than 1,000 packages already added to
+Spack in ``$SPACK_ROOT/var/spack/repos/builtin/packages``.
+
+Good Luck!
diff --git a/lib/spack/docs/tutorial_sc16_spack_basics.rst b/lib/spack/docs/tutorial_sc16_spack_basics.rst
new file mode 100644
index 0000000000..9511907ceb
--- /dev/null
+++ b/lib/spack/docs/tutorial_sc16_spack_basics.rst
@@ -0,0 +1,1255 @@
+.. _basics-tutorial:
+
+=========================================
+Basic Installation Tutorial
+=========================================
+
+This tutorial will guide you through the process of installing software
+using Spack. We will first cover the ``spack install`` command, focusing on
+the power of the spec syntax and the flexibility it gives to users. We
+will also cover the ``spack find`` command for viewing installed packages
+and the ``spack uninstall`` command. Finally, we will touch on how Spack
+manages compilers, especially as it relates to using Spack-built
+compilers within Spack. We will include full output from all of the
+commands demonstrated, although we will frequently call attention to only
+small portions of that output (or merely to the fact that it
+succeeded). The provided output is all from a cluster running Red Hat
+Enterprise Linux.
+
+.. _basics-tutorial-install:
+
+----------------
+Installing Spack
+----------------
+
+Spack works out of the box. Simply clone Spack and get going.
+
+.. code-block:: console
+
+ $ git clone https://github.com/LLNL/spack.git
+ Initialized empty Git repository in ~/spack/.git/
+ remote: Counting objects: 47125, done.
+ remote: Compressing objects: 100% (68/68), done.
+ remote: Total 47125 (delta 16), reused 2 (delta 2), pack-reused 47047
+ Receiving objects: 100% (47125/47125), 12.02 MiB | 2.11 MiB/s, done.
+ Resolving deltas: 100% (23044/23044), done.
+ $ cd spack
+
+Then add Spack to your path.
+
+.. code-block:: console
+
+ $ export PATH=~/spack/bin:$PATH
+
+You're good to go!
+
+-----------------
+What is in Spack?
+-----------------
+
+The ``spack list`` command shows available packages.
+
+.. code-block:: console
+
+ $ spack list
+ ==> 1016 packages.
+ abinit hwloc piranha r-rjava
+ ack hydra pixman r-rjson
+ activeharmony hypre pkg-config r-rjsonio
+ ...
+
+The ``spack list`` command can also take a query string. Spack
+automatically adds wildcards to both ends of the string. For example,
+we can view all available python packages.
+
+.. code-block:: console
+
+ $ spack list py
+ ==> 129 packages.
+ py-3to2 py-epydoc py-nestle py-pycparser py-six
+ py-alabaster py-flake8 py-netcdf py-pydatalog py-sncosmo
+ py-argcomplete py-funcsigs py-networkx py-pyelftools py-snowballstemmer
+ ...
+
+-------------------
+Installing Packages
+-------------------
+
+Installing a package with Spack is very simple. To install a piece of
+software, simply type ``spack install <package_name>``.
+
+.. code-block:: console
+
+ $ spack install libelf
+ ==> Installing libelf
+ ==> Trying to fetch from ~/spack/var/spack/cache/libelf/libelf-0.8.13.tar.gz
+ curl: (37) Couldn't open file ~/spack/var/spack/cache/libelf/libelf-0.8.13.tar.gz
+ ==> Fetching from ~/spack/var/spack/cache/libelf/libelf-0.8.13.tar.gz failed.
+ ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.13.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libelf-0.8.13-csrt4qxfkhjgn5xg3zjpkir7xdnszl2a/libelf-0.8.13.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libelf-0.8.13-csrt4qxfkhjgn5xg3zjpkir7xdnszl2a
+ ==> No patches needed for libelf
+ ==> Building libelf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libelf
+ Fetch: 1.21s. Build: 8.42s. Total: 9.62s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.13-csrt4qxfkhjgn5xg3zjpkir7xdnszl2a
+
+
+Spack's spec syntax is the interface by which we can request specific
+configurations of the package. The ``%`` sigil is used to specify
+compilers.
+
+.. code-block:: console
+
+ $ spack install libelf %intel
+ ==> Installing libelf
+ ==> Trying to fetch from ~/spack/var/spack/cache/libelf/libelf-0.8.13.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libelf-0.8.13-7wgp32xksatkvw2tbssmehw2t5tnxndj/libelf-0.8.13.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libelf-0.8.13-7wgp32xksatkvw2tbssmehw2t5tnxndj
+ ==> No patches needed for libelf
+ ==> Building libelf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libelf
+ Fetch: 0.09s. Build: 50.64s. Total: 50.72s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/intel-16.0.3/libelf-0.8.13-7wgp32xksatkvw2tbssmehw2t5tnxndj
+
+Note that this installation is located separately from the previous
+one. We will discuss this in more detail later, but this is part of what
+allows Spack to support arbitrarily versioned software.
+
+You can check for particular versions before requesting them. We will
+use the ``spack versions`` command to see the available versions, and then
+install a different version of ``libelf``.
+
+.. code-block:: console
+
+ $ spack versions libelf
+ ==> Safe versions (already checksummed):
+ 0.8.13
+ 0.8.12
+ ==> Remote versions (not yet checksummed):
+ 0.8.11
+ 0.8.10
+ 0.8.9
+ 0.8.8
+ 0.8.7
+ 0.8.6
+ 0.8.5
+ 0.8.4
+ 0.8.3
+ 0.8.2
+ 0.8.0
+ 0.7.0
+ 0.6.4
+ 0.5.2
+
+
+The ``@`` sigil is used to specify versions, both of packages and of
+compilers.
+
+.. code-block:: console
+
+ $ spack install libelf @0.8.12
+ ==> Installing libelf
+ ==> Trying to fetch from ~/spack/var/spack/cache/libelf/libelf-0.8.12.tar.gz
+ curl: (37) Couldn't open file ~/spack/var/spack/cache/libelf/libelf-0.8.12.tar.gz
+ ==> Fetching from ~/spack/var/spack/cache/libelf/libelf-0.8.12.tar.gz failed.
+ ==> Trying to fetch from http://www.mr511.de/software/libelf-0.8.12.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libelf-0.8.12-ipggckv6i7h44iryzfa4dwdela32a7fy/libelf-0.8.12.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libelf-0.8.12-ipggckv6i7h44iryzfa4dwdela32a7fy
+ ==> No patches needed for libelf
+ ==> Building libelf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libelf
+ Fetch: 1.12s. Build: 7.88s. Total: 9.00s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.12-ipggckv6i7h44iryzfa4dwdela32a7fy
+
+
+
+ $ spack install libelf %intel@15.0.4
+ ==> Installing libelf
+ ==> Trying to fetch from ~/spack/var/spack/cache/libelf/libelf-0.8.13.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libelf-0.8.13-w33hrejdyqu2j2gggdswitls2zv6kdsi/libelf-0.8.13.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libelf-0.8.13-w33hrejdyqu2j2gggdswitls2zv6kdsi
+ ==> No patches needed for libelf
+ ==> Building libelf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libelf
+ Fetch: 0.09s. Build: 55.51s. Total: 55.60s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/intel-15.0.4/libelf-0.8.13-w33hrejdyqu2j2gggdswitls2zv6kdsi
+
+
+The spec syntax also includes compiler flags. Spack accepts
+``cppflags``, ``cflags``, ``cxxflags``, ``fflags``, ``ldflags``, and
+``ldlibs`` parameters. The values of these fields must be quoted on
+the command line if they include spaces. These values are injected
+into the compile line automatically by the Spack compiler wrappers.
+
+.. code-block:: console
+
+ $ spack install libelf @0.8.12 cppflags="-O3"
+ ==> Installing libelf
+ ==> Trying to fetch from ~/spack/var/spack/cache/libelf/libelf-0.8.12.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libelf-0.8.12-vrv2ttbd34xlfoxy4jwt6qsjrcbalmmw/libelf-0.8.12.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libelf-0.8.12-vrv2ttbd34xlfoxy4jwt6qsjrcbalmmw
+ ==> No patches needed for libelf
+ ==> Building libelf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libelf
+ Fetch: 0.04s. Build: 7.95s. Total: 7.99s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.12-vrv2ttbd34xlfoxy4jwt6qsjrcbalmmw
+
+
+The ``spack find`` command is used to query installed packages. Note that
+some packages appear identical with the default output. The ``-l`` flag
+shows the hash of each package, and the ``-f`` flag shows any non-empty
+compiler flags of those packages.
+
+.. code-block:: console
+
+ $ spack find
+ ==> 5 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ libelf@0.8.12
+ libelf@0.8.12
+ libelf@0.8.13
+
+ -- linux-redhat6-x86_64 / intel@15.0.4 --------------------------
+ libelf@0.8.13
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ libelf@0.8.13
+
+
+
+ $ spack find -lf
+ ==> 5 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ ipggckv libelf@0.8.12%gcc
+
+ vrv2ttb libelf@0.8.12%gcc cppflags="-O3"
+
+ csrt4qx libelf@0.8.13%gcc
+
+
+ -- linux-redhat6-x86_64 / intel@15.0.4 --------------------------
+ w33hrej libelf@0.8.13%intel
+
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ 7wgp32x libelf@0.8.13%intel
+
+
+Spack generates a hash for each spec. This hash is a function of the full
+provenance of the package, so any change to the spec affects the
+hash. Spack uses this value to compare specs and to generate unique
+installation directories for every combination of version and configuration. As we move into
+more complicated packages with software dependencies, we can see that
+Spack reuses existing packages to satisfy a dependency only when the
+existing package's hash matches the desired spec.
+
+.. code-block:: console
+
+ $ spack install libdwarf
+ ==> Installing libdwarf
+ ==> libelf is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.13-csrt4qxfkhjgn5xg3zjpkir7xdnszl2a
+ ==> Can not find version 20160507 in url_list
+ ==> Trying to fetch from ~/spack/var/spack/cache/libdwarf/libdwarf-20160507.tar.gz
+ curl: (37) Couldn't open file ~/spack/var/spack/cache/libdwarf/libdwarf-20160507.tar.gz
+ ==> Fetching from ~/spack/var/spack/cache/libdwarf/libdwarf-20160507.tar.gz failed.
+ ==> Trying to fetch from http://www.prevanders.net/libdwarf-20160507.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libdwarf-20160507-yfx6p3g3rkmqvcqbmtb34o6pln7pqvcz/libdwarf-20160507.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libdwarf-20160507-yfx6p3g3rkmqvcqbmtb34o6pln7pqvcz
+ ==> No patches needed for libdwarf
+ ==> Building libdwarf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libdwarf
+ Fetch: 1.56s. Build: 33.59s. Total: 35.15s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libdwarf-20160507-yfx6p3g3rkmqvcqbmtb34o6pln7pqvcz
+
+
+Dependencies can be explicitly requested using the ``^`` sigil. Note that
+the spec syntax is recursive. Anything we could specify about the
+top-level package, we can also specify about a dependency using ``^``.
+
+.. code-block:: console
+
+ $ spack install libdwarf ^libelf @0.8.12 %intel
+ ==> Installing libdwarf
+ ==> Installing libelf
+ ==> Trying to fetch from ~/spack/var/spack/cache/libelf/libelf-0.8.12.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libelf-0.8.12-4blbe3qxqct3ymrfoxxnxysmybvbxay7/libelf-0.8.12.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libelf-0.8.12-4blbe3qxqct3ymrfoxxnxysmybvbxay7
+ ==> No patches needed for libelf
+ ==> Building libelf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libelf
+ Fetch: 0.04s. Build: 52.16s. Total: 52.19s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/intel-16.0.3/libelf-0.8.12-4blbe3qxqct3ymrfoxxnxysmybvbxay7
+ ==> Can not find version 20160507 in url_list
+ ==> Trying to fetch from ~/spack/var/spack/cache/libdwarf/libdwarf-20160507.tar.gz
+ ################################################################################################################################################################################# 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libdwarf-20160507-csruprgucaujkfkrcywhwou7nbeis5fo/libdwarf-20160507.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libdwarf-20160507-csruprgucaujkfkrcywhwou7nbeis5fo
+ ==> No patches needed for libdwarf
+ ==> Building libdwarf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libdwarf
+ Fetch: 0.40s. Build: 2m 17.29s. Total: 2m 17.69s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/intel-16.0.3/libdwarf-20160507-csruprgucaujkfkrcywhwou7nbeis5fo
+
+
+Packages can also be referred to from the command line by their package
+hash. Using the ``spack find -lf`` command earlier we saw that the hash
+of our optimized installation of libelf (``cppflags="-O3"``) began with
+``vrv2ttb``. We can now explicitly build with that package without typing
+the entire spec, by using the ``/`` sigil to refer to it by hash. As with
+other tools like git, you do not need to specify an *entire* hash on the
+command line. You can specify just enough digits to identify a hash
+uniquely. If a hash prefix is ambiguous (i.e., two or more installed
+packages share the prefix) then Spack will report an error.
+
+.. code-block:: console
+
+ $ spack install libdwarf ^/vrv2ttb
+ ==> Installing libdwarf
+ ==> libelf is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.12-vrv2ttbd34xlfoxy4jwt6qsjrcbalmmw
+ ==> Can not find version 20160507 in url_list
+ ==> Trying to fetch from ~/spack/var/spack/cache/libdwarf/libdwarf-20160507.tar.gz
+ #################################################################################################################################################################################################################################################### 100.0%
+ ==> Staging archive: ~/spack/var/spack/stage/libdwarf-20160507-dtg3tgnp7htccoly26gduqlrgvnwcp5t/libdwarf-20160507.tar.gz
+ ==> Created stage in ~/spack/var/spack/stage/libdwarf-20160507-dtg3tgnp7htccoly26gduqlrgvnwcp5t
+ ==> No patches needed for libdwarf
+ ==> Building libdwarf [Package]
+ ==> Executing phase : 'install'
+ ==> Successfully installed libdwarf
+ Fetch: 0.96s. Build: 24.03s. Total: 24.99s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libdwarf-20160507-dtg3tgnp7htccoly26gduqlrgvnwcp5t
+
+
+The ``spack find`` command can also take a ``-d`` flag, which can show
+dependency information. Note that each package has a top-level entry,
+even if it also appears as a dependency.
+
+.. code-block:: console
+
+ $ spack find -ldf
+ ==> 9 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ dtg3tgn libdwarf@20160507%gcc
+ vrv2ttb ^libelf@0.8.12%gcc cppflags="-O3"
+
+ yfx6p3g libdwarf@20160507%gcc
+ csrt4qx ^libelf@0.8.13%gcc
+
+ ipggckv libelf@0.8.12%gcc
+
+ vrv2ttb libelf@0.8.12%gcc cppflags="-O3"
+
+ csrt4qx libelf@0.8.13%gcc
+
+
+ -- linux-redhat6-x86_64 / intel@15.0.4 --------------------------
+ w33hrej libelf@0.8.13%intel
+
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ csruprg libdwarf@20160507%intel
+ 4blbe3q ^libelf@0.8.12%intel
+
+ 4blbe3q libelf@0.8.12%intel
+
+ 7wgp32x libelf@0.8.13%intel
+
+
+As we get to more complex packages, full installs will take too long to
+build in the time allotted for this tutorial. Our collaborators at CERN
+have been working on binary caching for Spack, which would allow for very
+fast installs of previously built packages. We are still working out the
+security ramifications of the feature, but it is coming soon.
+
+For now, we will switch to doing "fake" installs. When supplied with the
+``--fake`` flag (primarily used for debugging), Spack computes build
+metadata the same way it normally would, but it does not download the
+source or run the install script for a package. We can use this to
+quickly demonstrate some of the more advanced Spack features in our
+limited tutorial time.
+
+``HDF5`` is an example of a more complicated package, with an MPI
+dependency. If we install it "out of the box," it will build with
+``openmpi``.
+
+.. code-block:: console
+
+ $ spack install --fake hdf5
+ ==> Installing hdf5
+ ==> Installing zlib
+ ==> Building zlib [Package]
+ ==> Successfully installed zlib
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Installing openmpi
+ ==> Installing hwloc
+ ==> Installing libpciaccess
+ ==> Installing util-macros
+ ==> Building util-macros [Package]
+ ==> Successfully installed util-macros
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/util-macros-1.19.0-pc6zhs4cnkmg2cv4et4fizsp6scuvacg
+ ==> Installing libtool
+ ==> Installing m4
+ ==> Installing libsigsegv
+ ==> Building libsigsegv [Package]
+ ==> Successfully installed libsigsegv
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libsigsegv-2.10-q4cok3yber7lhf3jswg6mysg7oi53unh
+ ==> Building m4 [Package]
+ ==> Successfully installed m4
+ Fetch: . Build: 0.23s. Total: 0.23s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> Building libtool [Package]
+ ==> Successfully installed libtool
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libtool-2.4.6-rdx5nkfjwlvcanz5il3ys2pe34j4vxx5
+ ==> Installing pkg-config
+ ==> Building pkg-config [Package]
+ ==> Successfully installed pkg-config
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/pkg-config-0.29.1-wpjnlzahdw6ahkrgmqyeugkj2zhv4tui
+ ==> Building libpciaccess [Package]
+ ==> Successfully installed libpciaccess
+ Fetch: . Build: 0.10s. Total: 0.10s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libpciaccess-0.13.4-m2f6fpm22rpprq2ihkmfx6llf363264m
+ ==> Building hwloc [Package]
+ ==> Successfully installed hwloc
+ Fetch: . Build: 0.23s. Total: 0.23s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hwloc-1.11.4-xpb6hbl2hsze25cgdgfnoppn6rchhzaz
+ ==> Building openmpi [Package]
+ ==> Successfully installed openmpi
+ Fetch: . Build: 0.35s. Total: 0.35s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openmpi-2.0.1-j4cgoq4furxvr73pq72r2qgywgksw3qn
+ ==> Building hdf5 [AutotoolsPackage]
+ ==> Successfully installed hdf5
+ Fetch: . Build: 0.61s. Total: 0.61s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-ezvtnox35albuaxqryuondweyjgeo6es
+
+
+Spack packages can also have variants. Boolean variants can be specified
+using the ``+`` and ``~`` or ``-`` sigils. There are two sigils for
+``False`` to avoid conflicts with shell parsing in different
+situations. Variants (boolean or otherwise) can also be specified using
+the same syntax as compiler flags. Here we can install HDF5 without MPI
+support.
+
+.. code-block:: console
+
+ $ spack install --fake hdf5~mpi
+ ==> Installing hdf5
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Building hdf5 [AutotoolsPackage]
+ ==> Successfully installed hdf5
+ Fetch: . Build: 0.22s. Total: 0.22s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-twppaioxqn6lti4grgopnmhwcq3h2rpw
+
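+For reference, variants are declared in the package recipe itself. The
+following is a minimal, illustrative sketch (not HDF5's actual recipe;
+the package name, URL, and checksum are placeholders) of how a boolean
+variant and the dependency it controls might be declared:
+
+.. code-block:: python
+
+ # Illustrative sketch only -- not a real Spack recipe.
+ from spack import *
+
+ class Mylib(AutotoolsPackage):
+     """Hypothetical library used to illustrate variants."""
+
+     homepage = "http://www.example.com"
+     url      = "http://www.example.com/mylib-1.0.tar.gz"
+
+     version('1.0', '0123456789abcdef0123456789abcdef')
+
+     # Boolean variant, selected on the command line with +mpi or ~mpi.
+     variant('mpi', default=True, description='Enable MPI support')
+
+     # The dependency is only added when the variant is enabled.
+     depends_on('mpi', when='+mpi')
+
+     def configure_args(self):
+         # Translate the variant into a configure flag.
+         if '+mpi' in self.spec:
+             return ['--enable-parallel']
+         return ['--disable-parallel']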
+
+We might also want to install HDF5 with a different MPI
+implementation. While MPI is not a package itself, packages can depend on
+abstract interfaces like MPI. Spack handles these through "virtual
+dependencies." A package, such as HDF5, can depend on the MPI
+interface. Other packages (``openmpi``, ``mpich``, ``mvapich``, etc.)
+provide the MPI interface. Any of these providers can be requested for
+an MPI dependency. For example, we can build HDF5 with MPI support
+provided by mpich by specifying a dependency on ``mpich``. Spack also
+supports versioning of virtual dependencies. A package can depend on the
+MPI interface at version 3, and provider packages specify what version of
+the interface *they* provide. The partial spec ``^mpi@3`` can be satisfied
+by any of several providers.
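+
+For reference, both sides of a virtual dependency are expressed in the
+package recipes themselves. A condensed, hedged sketch of the relevant
+directives (in reality each class lives in its own ``package.py``, and
+the version numbers shown are illustrative):
+
+.. code-block:: python
+
+ from spack import *
+
+ class Hdf5(Package):
+     """Sketch of a package that depends on the MPI interface."""
+     # Depend on the abstract 'mpi' virtual package at interface
+     # version 3 or newer; any provider can satisfy this.
+     depends_on('mpi@3:')
+
+ class Mpich(Package):
+     """Sketch of a package that provides the MPI interface."""
+     # Declare which version of the MPI interface this package implements.
+     provides('mpi@3')
+
+With declarations like these in place, requesting ``^mpich`` below simply
+selects ``mpich`` as the provider.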
+
+.. code-block:: console
+
+ $ spack install --fake hdf5+mpi ^mpich
+ ==> Installing hdf5
+ ==> mpich is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpich-3.2-5jlp2ndnsb67txggraglu47vjmayx5za
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Building hdf5 [AutotoolsPackage]
+ ==> Successfully installed hdf5
+ Fetch: . Build: 0.38s. Total: 0.38s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-j36yfw25i6gdd3q4vwlupgkpwic4ua6m
+
+
+Let's take a quick look at what we have installed so far.
+
+.. code-block:: console
+
+ $ spack find -ldf
+ ==> 22 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ twppaio hdf5@1.10.0-patch1%gcc
+ ayc4jq7 ^zlib@1.2.8%gcc
+
+ j36yfw2 hdf5@1.10.0-patch1%gcc
+ 5jlp2nd ^mpich@3.2%gcc
+ ayc4jq7 ^zlib@1.2.8%gcc
+
+ ezvtnox hdf5@1.10.0-patch1%gcc
+ j4cgoq4 ^openmpi@2.0.1%gcc
+ xpb6hbl ^hwloc@1.11.4%gcc
+ m2f6fpm ^libpciaccess@0.13.4%gcc
+ ayc4jq7 ^zlib@1.2.8%gcc
+
+ xpb6hbl hwloc@1.11.4%gcc
+ m2f6fpm ^libpciaccess@0.13.4%gcc
+
+ dtg3tgn libdwarf@20160507%gcc
+ vrv2ttb ^libelf@0.8.12%gcc cppflags="-O3"
+
+ yfx6p3g libdwarf@20160507%gcc
+ csrt4qx ^libelf@0.8.13%gcc
+
+ ipggckv libelf@0.8.12%gcc
+
+ vrv2ttb libelf@0.8.12%gcc cppflags="-O3"
+
+ csrt4qx libelf@0.8.13%gcc
+
+ m2f6fpm libpciaccess@0.13.4%gcc
+
+ q4cok3y libsigsegv@2.10%gcc
+
+ rdx5nkf libtool@2.4.6%gcc
+
+ qijdzvh m4@1.4.17%gcc
+ q4cok3y ^libsigsegv@2.10%gcc
+
+ 5jlp2nd mpich@3.2%gcc
+
+ j4cgoq4 openmpi@2.0.1%gcc
+ xpb6hbl ^hwloc@1.11.4%gcc
+ m2f6fpm ^libpciaccess@0.13.4%gcc
+
+ wpjnlza pkg-config@0.29.1%gcc
+
+ pc6zhs4 util-macros@1.19.0%gcc
+
+ ayc4jq7 zlib@1.2.8%gcc
+
+
+ -- linux-redhat6-x86_64 / intel@15.0.4 --------------------------
+ w33hrej libelf@0.8.13%intel
+
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ csruprg libdwarf@20160507%intel
+ 4blbe3q ^libelf@0.8.12%intel
+
+ 4blbe3q libelf@0.8.12%intel
+
+ 7wgp32x libelf@0.8.13%intel
+
+
+Spack models the dependencies of packages as a directed acyclic graph
+(DAG). The ``spack find -d`` command shows the tree representation of
+that graph. We can also use the ``spack graph`` command to view the entire
+DAG as a graph.
+
+.. code-block:: console
+
+ $ spack graph hdf5+mpi ^mpich
+ o hdf5
+ |\
+ o | zlib
+ /
+ o mpich
+
+You may also have noticed that there are some packages shown in the
+``spack find -d`` output that we didn't install explicitly. These are
+dependencies that were installed implicitly. A few packages installed
+implicitly are not shown as dependencies in the ``spack find -d``
+output. These are build dependencies. For example, ``libpciaccess`` is a
+dependency of ``openmpi`` and requires ``m4`` to build. Spack will build ``m4`` as
+part of the installation of ``openmpi``, but it does not become a part of
+the DAG because it is not linked in at run time. Spack handles build
+dependencies differently because their consistency requirements are less
+strict: two packages can build with different versions of the same build
+dependency, which is not possible for linked dependencies.
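+
+For reference, the recipe distinguishes these cases with a dependency
+*type*. A hedged sketch (hypothetical package, and the exact keyword may
+vary between Spack versions):
+
+.. code-block:: python
+
+ from spack import *
+
+ class Mypackage(Package):
+     """Hypothetical package used only to illustrate dependency types."""
+
+     # Default type: the dependency is linked in and stays in the DAG.
+     depends_on('zlib')
+
+     # Build-only: needed while building (e.g. to generate configure
+     # scripts) but not linked in, so it is not a run-time part of the DAG.
+     depends_on('m4', type='build')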
+
+``HDF5`` is more complicated than our basic example of libelf and
+libdwarf, but it's still within the realm of software that an experienced
+HPC user could reasonably expect to install given a bit of time. Now
+let's look at a more complicated package.
+
+.. code-block:: console
+
+ $ spack install --fake trilinos
+ ==> Installing trilinos
+ ==> Installing superlu-dist
+ ==> openmpi is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openmpi-2.0.1-j4cgoq4furxvr73pq72r2qgywgksw3qn
+ ==> Installing parmetis
+ ==> openmpi is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openmpi-2.0.1-j4cgoq4furxvr73pq72r2qgywgksw3qn
+ ==> Installing cmake
+ ==> Installing bzip2
+ ==> Building bzip2 [Package]
+ ==> Successfully installed bzip2
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/bzip2-1.0.6-gll2xsahysy7ji5gkmfxwkofdt3mwjhs
+ ==> expat is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/expat-2.2.0-mg5kwd3kluxdgorj32vzbp7aksg3vqej
+ ==> Installing ncurses
+ ==> Building ncurses [Package]
+ ==> Successfully installed ncurses
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/ncurses-6.0-fttg4astvrtq2buey4wq66tnyu7bgj2c
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Installing openssl
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Building openssl [Package]
+ ==> Successfully installed openssl
+ Fetch: . Build: 0.23s. Total: 0.23s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openssl-1.0.2j-kt5xyk2dkho6tzadnqlbnbujmljprylg
+ ==> Installing libarchive
+ ==> Installing lzma
+ ==> Building lzma [Package]
+ ==> Successfully installed lzma
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/lzma-4.32.7-hah2cdo3zbulz6yg5do6dvnfn6en5v5c
+ ==> Installing nettle
+ ==> m4 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> Installing gmp
+ ==> m4 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> Building gmp [AutotoolsPackage]
+ ==> Successfully installed gmp
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gmp-6.1.1-uwn4gfdtq3sywy5uf4f7znrh66oybikf
+ ==> Building nettle [Package]
+ ==> Successfully installed nettle
+ Fetch: . Build: 0.18s. Total: 0.18s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/nettle-3.2-w4ieitifcmrldo4ra7as63apagzf56ja
+ ==> bzip2 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/bzip2-1.0.6-gll2xsahysy7ji5gkmfxwkofdt3mwjhs
+ ==> expat is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/expat-2.2.0-mg5kwd3kluxdgorj32vzbp7aksg3vqej
+ ==> Installing libxml2
+ ==> Installing xz
+ ==> Building xz [Package]
+ ==> Successfully installed xz
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/xz-5.2.2-bxh6cpyqqozazm5okvjqk23sww3gccnf
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Building libxml2 [Package]
+ ==> Successfully installed libxml2
+ Fetch: . Build: 0.35s. Total: 0.35s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libxml2-2.9.4-un323rppyu5qipkegyf7flmymvtmunrx
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Installing lz4
+ ==> Building lz4 [Package]
+ ==> Successfully installed lz4
+ Fetch: . Build: 0.12s. Total: 0.12s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/lz4-131-ivy2fcaw7ywujx74weebdi5bsm7q4vkc
+ ==> openssl is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openssl-1.0.2j-kt5xyk2dkho6tzadnqlbnbujmljprylg
+ ==> xz is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/xz-5.2.2-bxh6cpyqqozazm5okvjqk23sww3gccnf
+ ==> Installing lzo
+ ==> Building lzo [AutotoolsPackage]
+ ==> Successfully installed lzo
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/lzo-2.09-dlgnm74ozo6baactkft5oah2jre2ri2i
+ ==> Building libarchive [Package]
+ ==> Successfully installed libarchive
+ Fetch: . Build: 1.35s. Total: 1.35s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libarchive-3.2.1-biq3kebw7vel7njusill7vv7mjldkqjv
+ ==> xz is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/xz-5.2.2-bxh6cpyqqozazm5okvjqk23sww3gccnf
+ ==> Installing curl
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> openssl is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openssl-1.0.2j-kt5xyk2dkho6tzadnqlbnbujmljprylg
+ ==> Building curl [Package]
+ ==> Successfully installed curl
+ Fetch: . Build: 0.36s. Total: 0.36s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/curl-7.50.3-oze4gqutj4x2isbkcn5ob2bhhxbskod4
+ ==> Building cmake [Package]
+ ==> Successfully installed cmake
+ Fetch: . Build: 1.64s. Total: 1.64s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> Installing metis
+ ==> cmake is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> Building metis [Package]
+ ==> Successfully installed metis
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/metis-5.1.0-ithifyl4xvqbn76js23wsb4tjnztrbdv
+ ==> Building parmetis [Package]
+ ==> Successfully installed parmetis
+ Fetch: . Build: 0.62s. Total: 0.62s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/parmetis-4.0.3-rtg6hml5t6acdcnxomn3l5zfiful4d2t
+ ==> Installing openblas
+ ==> Building openblas [Package]
+ ==> Successfully installed openblas
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> metis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/metis-5.1.0-ithifyl4xvqbn76js23wsb4tjnztrbdv
+ ==> Building superlu-dist [Package]
+ ==> Successfully installed superlu-dist
+ Fetch: . Build: 0.85s. Total: 0.85s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/superlu-dist-5.1.1-25r6jlvkpjnkiuwt2rtbzhk3l3htuxs7
+ ==> cmake is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> Installing glm
+ ==> cmake is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> Building glm [Package]
+ ==> Successfully installed glm
+ Fetch: . Build: 0.12s. Total: 0.12s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/glm-0.9.7.1-7a6oho4aerz7vftxd5ur7lywscht2iry
+ ==> Installing hypre
+ ==> openmpi is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openmpi-2.0.1-j4cgoq4furxvr73pq72r2qgywgksw3qn
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> Building hypre [Package]
+ ==> Successfully installed hypre
+ Fetch: . Build: 0.61s. Total: 0.61s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hypre-2.11.1-lf7hcejiiww5peesh57quda72z67veit
+ ==> metis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/metis-5.1.0-ithifyl4xvqbn76js23wsb4tjnztrbdv
+ ==> Installing netlib-scalapack
+ ==> openmpi is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openmpi-2.0.1-j4cgoq4furxvr73pq72r2qgywgksw3qn
+ ==> cmake is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> Building netlib-scalapack [Package]
+ ==> Successfully installed netlib-scalapack
+ Fetch: . Build: 0.61s. Total: 0.61s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/netlib-scalapack-2.0.2-dvcanz2qq4dfcexznbhbmzbxfj43uz4q
+ ==> Installing suite-sparse
+ ==> Installing tbb
+ ==> Building tbb [Package]
+ ==> Successfully installed tbb
+ Fetch: . Build: 0.12s. Total: 0.12s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/tbb-4.4.4-zawzkkhrmdonbjpj3a5bb6gkgnqlrjeu
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> metis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/metis-5.1.0-ithifyl4xvqbn76js23wsb4tjnztrbdv
+ ==> Building suite-sparse [Package]
+ ==> Successfully installed suite-sparse
+ Fetch: . Build: 0.49s. Total: 0.49s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/suite-sparse-4.5.3-lvur6hriy2j7xfjwh5punp3exwpynzm6
+ ==> openmpi is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openmpi-2.0.1-j4cgoq4furxvr73pq72r2qgywgksw3qn
+ ==> Installing netcdf
+ ==> m4 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> curl is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/curl-7.50.3-oze4gqutj4x2isbkcn5ob2bhhxbskod4
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> hdf5 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-ezvtnox35albuaxqryuondweyjgeo6es
+ ==> Building netcdf [Package]
+ ==> Successfully installed netcdf
+ Fetch: . Build: 0.90s. Total: 0.90s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/netcdf-4.4.1-tcl4zbrmdfrit2cqlaxig6xieu5h552j
+ ==> Installing mumps
+ ==> netlib-scalapack is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/netlib-scalapack-2.0.2-dvcanz2qq4dfcexznbhbmzbxfj43uz4q
+ ==> openmpi is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openmpi-2.0.1-j4cgoq4furxvr73pq72r2qgywgksw3qn
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> Building mumps [Package]
+ ==> Successfully installed mumps
+ Fetch: . Build: 0.74s. Total: 0.74s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mumps-5.0.2-kr5r4nnx5tfcacxnk3ii5dsxbe6pu5fy
+ ==> Installing matio
+ ==> Building matio [Package]
+ ==> Successfully installed matio
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/matio-1.5.2-4zrozucookychlvc4q53omp2zyfk2bed
+ ==> Installing boost
+ ==> bzip2 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/bzip2-1.0.6-gll2xsahysy7ji5gkmfxwkofdt3mwjhs
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> Building boost [Package]
+ ==> Successfully installed boost
+ Fetch: . Build: 0.35s. Total: 0.35s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/boost-1.62.0-je7eqvzt74kezwhh55y5lwt5dy6pnali
+ ==> parmetis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/parmetis-4.0.3-rtg6hml5t6acdcnxomn3l5zfiful4d2t
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> hdf5 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-ezvtnox35albuaxqryuondweyjgeo6es
+ ==> Building trilinos [Package]
+ ==> Successfully installed trilinos
+ Fetch: . Build: 2.63s. Total: 2.63s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/trilinos-12.8.1-uvd6dfd7x4uyvck4awo3r3frudihn4ar
+
+
+Now we're starting to see the power of Spack. Trilinos has 11 top-level
+dependencies, many of which have dependencies of their own. Installing
+more complex packages can take days or weeks even for an experienced
+user. Although we've done a fake installation for the tutorial, a real
+installation of trilinos using Spack takes about 3 hours (depending on
+the system), but only 20 seconds of programmer time.
+
+Spack manages consistency of the entire DAG. Every MPI dependency will
+be satisfied by the same configuration of MPI, etc. If we install
+``trilinos`` again specifying a dependency on our previous HDF5 built
+with ``mpich``:
+
+.. code-block:: console
+
+ $ spack install --fake trilinos ^hdf5+mpi ^mpich
+ ==> Installing trilinos
+ ==> Installing superlu-dist
+ ==> mpich is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpich-3.2-5jlp2ndnsb67txggraglu47vjmayx5za
+ ==> metis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/metis-5.1.0-ithifyl4xvqbn76js23wsb4tjnztrbdv
+ ==> Installing parmetis
+ ==> mpich is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpich-3.2-5jlp2ndnsb67txggraglu47vjmayx5za
+ ==> metis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/metis-5.1.0-ithifyl4xvqbn76js23wsb4tjnztrbdv
+ ==> cmake is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> Building parmetis [Package]
+ ==> Successfully installed parmetis
+ Fetch: . Build: 0.38s. Total: 0.38s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/parmetis-4.0.3-43kbtni6p5y446c6qdkybq4htj7ot4zn
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> Building superlu-dist [Package]
+ ==> Successfully installed superlu-dist
+ Fetch: . Build: 0.61s. Total: 0.61s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/superlu-dist-5.1.1-46uuupehmonx5jicc6xnegnud2n5jqyl
+ ==> cmake is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> glm is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/glm-0.9.7.1-7a6oho4aerz7vftxd5ur7lywscht2iry
+ ==> Installing hypre
+ ==> mpich is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpich-3.2-5jlp2ndnsb67txggraglu47vjmayx5za
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> Building hypre [Package]
+ ==> Successfully installed hypre
+ Fetch: . Build: 0.37s. Total: 0.37s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hypre-2.11.1-6ajnyymoivs5apajd7thjisae36jv4lz
+ ==> metis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/metis-5.1.0-ithifyl4xvqbn76js23wsb4tjnztrbdv
+ ==> Installing netlib-scalapack
+ ==> mpich is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpich-3.2-5jlp2ndnsb67txggraglu47vjmayx5za
+ ==> cmake is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/cmake-3.6.1-n2nkknrku6dvuneo3rjumim7axt7n36e
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> Building netlib-scalapack [Package]
+ ==> Successfully installed netlib-scalapack
+ Fetch: . Build: 0.37s. Total: 0.37s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/netlib-scalapack-2.0.2-dayeep27omm26wksd3iqvbu3gezc2eoh
+ ==> suite-sparse is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/suite-sparse-4.5.3-lvur6hriy2j7xfjwh5punp3exwpynzm6
+ ==> Installing netcdf
+ ==> m4 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> curl is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/curl-7.50.3-oze4gqutj4x2isbkcn5ob2bhhxbskod4
+ ==> zlib is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/zlib-1.2.8-ayc4jq7vxuzge5n444gutvskeytfdruh
+ ==> hdf5 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-j36yfw25i6gdd3q4vwlupgkpwic4ua6m
+ ==> Building netcdf [Package]
+ ==> Successfully installed netcdf
+ Fetch: . Build: 0.67s. Total: 0.67s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/netcdf-4.4.1-gfemi4jk4qltvp33xhtpkam7dozbqvhq
+ ==> Installing mumps
+ ==> mpich is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpich-3.2-5jlp2ndnsb67txggraglu47vjmayx5za
+ ==> netlib-scalapack is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/netlib-scalapack-2.0.2-dayeep27omm26wksd3iqvbu3gezc2eoh
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> Building mumps [Package]
+ ==> Successfully installed mumps
+ Fetch: . Build: 0.49s. Total: 0.49s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mumps-5.0.2-w7t5pl3jhhwitfiyer63zj6zv7idkt3m
+ ==> mpich is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpich-3.2-5jlp2ndnsb67txggraglu47vjmayx5za
+ ==> matio is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/matio-1.5.2-4zrozucookychlvc4q53omp2zyfk2bed
+ ==> boost is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/boost-1.62.0-je7eqvzt74kezwhh55y5lwt5dy6pnali
+ ==> parmetis is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/parmetis-4.0.3-43kbtni6p5y446c6qdkybq4htj7ot4zn
+ ==> openblas is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/openblas-0.2.19-bwofa7fhff6od5zn56vy3j4eeyupsqgt
+ ==> hdf5 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-j36yfw25i6gdd3q4vwlupgkpwic4ua6m
+ ==> Building trilinos [Package]
+ ==> Successfully installed trilinos
+ Fetch: . Build: 2.42s. Total: 2.42s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/trilinos-12.8.1-ffwrpxnq7lhiw2abxn2u7ffr4jjsdwep
+
+We see that every package in the trilinos DAG that depends on MPI now
+uses ``mpich``.
+
+.. code-block:: console
+
+ $ spack find -d trilinos
+ ==> 2 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ trilinos@12.8.1
+ ^boost@1.62.0
+ ^bzip2@1.0.6
+ ^zlib@1.2.8
+ ^glm@0.9.7.1
+ ^hdf5@1.10.0-patch1
+ ^mpich@3.2
+ ^hypre@2.11.1
+ ^openblas@0.2.19
+ ^matio@1.5.2
+ ^metis@5.1.0
+ ^mumps@5.0.2
+ ^netlib-scalapack@2.0.2
+ ^netcdf@4.4.1
+ ^curl@7.50.3
+ ^openssl@1.0.2j
+ ^parmetis@4.0.3
+ ^suite-sparse@4.5.3
+ ^tbb@4.4.4
+ ^superlu-dist@5.1.1
+
+ trilinos@12.8.1
+ ^boost@1.62.0
+ ^bzip2@1.0.6
+ ^zlib@1.2.8
+ ^glm@0.9.7.1
+ ^hdf5@1.10.0-patch1
+ ^openmpi@2.0.1
+ ^hwloc@1.11.4
+ ^libpciaccess@0.13.4
+ ^hypre@2.11.1
+ ^openblas@0.2.19
+ ^matio@1.5.2
+ ^metis@5.1.0
+ ^mumps@5.0.2
+ ^netlib-scalapack@2.0.2
+ ^netcdf@4.4.1
+ ^curl@7.50.3
+ ^openssl@1.0.2j
+ ^parmetis@4.0.3
+ ^suite-sparse@4.5.3
+ ^tbb@4.4.4
+ ^superlu-dist@5.1.1
+
+
+As we discussed before, the ``spack find -d`` command shows the
+dependency information as a tree. While that is often sufficient, many
+complicated packages, including trilinos, have dependencies that
+cannot be fully represented as a tree. Again, the ``spack graph``
+command shows the full DAG of the dependency information.
+
+.. code-block:: console
+
+ $ spack graph trilinos
+ o trilinos
+ |\
+ | |\
+ | | |\
+ | | | |\
+ | | | | |\
+ | | | | | |\
+ | | | | | | |\
+ | o | | | | | | netcdf
+ | |\ \ \ \ \ \ \
+ | | |\ \ \ \ \ \ \
+ | | | o | | | | | | curl
+ | | |/| | | | | | |
+ | |/| | | | | | | |
+ | | | o | | | | | | openssl
+ | | |/ / / / / / /
+ | |/| | | | | | |
+ | | o | | | | | | hdf5
+ | |/| | | | | | |
+ | | |/ / / / / /
+ | o | | | | | | zlib
+ | / / / / / /
+ o | | | | | | swig
+ o | | | | | | pcre
+ / / / / / /
+ o | | | | | mpi
+ / / / / /
+ o | | | | matio
+ / / / /
+ o | | | lapack
+ / / /
+ o | | glm
+ / /
+ o | boost
+ /
+ o blas
+
+
+You can control how the output is displayed with a number of options.
+
+The ASCII output from ``spack graph`` can be difficult to parse for
+complicated packages. The output can be changed to the ``graphviz``
+``.dot`` format using the ``--dot`` flag.
+
+.. code-block:: console
+
+ $ spack graph --dot trilinos | dot -Tpdf > trilinos_graph.pdf
+
+.. _basics-tutorial-uninstall:
+
+---------------------
+Uninstalling Packages
+---------------------
+
+Earlier we installed many configurations each of libelf and
+libdwarf. Now we will go through and uninstall some of those packages
+that we didn't really need.
+
+.. code-block:: console
+
+ $ spack find -d libdwarf
+ ==> 3 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ libdwarf@20160507
+ ^libelf@0.8.12
+
+ libdwarf@20160507
+ ^libelf@0.8.13
+
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ libdwarf@20160507
+ ^libelf@0.8.12
+
+ $ spack find libelf
+ ==> 6 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ libelf@0.8.12 libelf@0.8.12 libelf@0.8.13
+
+ -- linux-redhat6-x86_64 / intel@15.0.4 --------------------------
+ libelf@0.8.13
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ libelf@0.8.12 libelf@0.8.13
+
+
+We can uninstall packages by spec using the same syntax as install.
+
+.. code-block:: console
+
+ $ spack uninstall libelf%intel@15.0.4
+ ==> The following packages will be uninstalled :
+
+ -- linux-redhat6-x86_64 / intel@15.0.4 --------------------------
+ w33hrej libelf@0.8.13%intel
+
+
+ ==> Do you want to proceed ? [y/n]
+ y
+ ==> Successfully uninstalled libelf@0.8.13%intel@15.0.4 arch=linux-redhat6-x86_64-w33hrej
+
+
+
+ $ spack find -lf libelf
+ ==> 5 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ ipggckv libelf@0.8.12%gcc
+
+ vrv2ttb libelf@0.8.12%gcc cppflags="-O3"
+
+ csrt4qx libelf@0.8.13%gcc
+
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ 4blbe3q libelf@0.8.12%intel
+
+ 7wgp32x libelf@0.8.13%intel
+
+
+We can also uninstall packages by referring only to their hash. To
+remove packages that are required by another installed package, we can
+use either ``-f`` (force) or ``-d`` (remove dependents as well).
+
+.. code-block:: console
+
+ $ spack uninstall /4blb
+ ==> Error: Will not uninstall libelf@0.8.12%intel@16.0.3-4blbe3q
+
+ The following packages depend on it:
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ csruprg libdwarf@20160507%intel
+
+
+ ==> Error: You can use spack uninstall --dependents to uninstall these dependencies as well
+ $ spack uninstall -d /4blb
+ ==> The following packages will be uninstalled :
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ csruprg libdwarf@20160507%intel
+
+ 4blbe3q libelf@0.8.12%intel
+
+
+ ==> Do you want to proceed ? [y/n]
+ y
+ ==> Successfully uninstalled libdwarf@20160507%intel@16.0.3 arch=linux-redhat6-x86_64-csruprg
+ ==> Successfully uninstalled libelf@0.8.12%intel@16.0.3 arch=linux-redhat6-x86_64-4blbe3q
+
+
+Spack will not uninstall packages that are not sufficiently
+specified. The ``-a`` (all) flag can be used to uninstall multiple
+packages at once.
+
+.. code-block:: console
+
+ $ spack uninstall trilinos
+ ==> Error: trilinos matches multiple packages:
+
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ ffwrpxn trilinos@12.8.1%gcc+boost~debug+hdf5+hypre+metis+mumps~python+shared+suite-sparse+superlu-dist
+
+ uvd6dfd trilinos@12.8.1%gcc+boost~debug+hdf5+hypre+metis+mumps~python+shared+suite-sparse+superlu-dist
+
+
+ ==> Error: You can either:
+ a) Use a more specific spec, or
+ b) use spack uninstall -a to uninstall ALL matching specs.
+
+
+
+ $ spack uninstall /ffwr
+ ==> The following packages will be uninstalled :
+
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ ffwrpxn trilinos@12.8.1%gcc+boost~debug+hdf5+hypre+metis+mumps~python+shared+suite-sparse+superlu-dist
+
+
+ ==> Do you want to proceed ? [y/n]
+ y
+ ==> Successfully uninstalled trilinos@12.8.1%gcc@4.4.7+boost~debug+hdf5+hypre+metis+mumps~python+shared+suite-sparse+superlu-dist arch=linux-redhat6-x86_64-ffwrpxn
+
+-----------------------------
+Advanced ``spack find`` Usage
+-----------------------------
+
+We will go over some additional uses for the ``spack find`` command not
+already covered in the :ref:`basics-tutorial-install` and
+:ref:`basics-tutorial-uninstall` sections.
+
+The ``spack find`` command can accept what we call "anonymous specs."
+These are expressions in spec syntax that do not contain a package
+name. For example, ``spack find %intel`` will return every package built
+with the intel compiler, and ``spack find cppflags="-O3"`` will
+return every package which was built with ``cppflags="-O3"``.
+
+.. code-block:: console
+
+ $ spack find %intel
+ ==> 1 installed packages.
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ libelf@0.8.13
+
+
+
+ $ spack find cppflags="-O3"
+ ==> 1 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ libelf@0.8.12
+
+
+The ``find`` command can also show which packages were installed
+explicitly (rather than pulled in as a dependency) using the ``-e``
+flag. The ``-E`` flag shows implicit installs only. The ``find`` command
+can also show the path to which a Spack package was installed using the
+``-p`` flag.
+
+.. code-block:: console
+
+ $ spack find -pe
+ ==> 10 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ hdf5@1.10.0-patch1 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-twppaioxqn6lti4grgopnmhwcq3h2rpw
+ hdf5@1.10.0-patch1 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-j36yfw25i6gdd3q4vwlupgkpwic4ua6m
+ hdf5@1.10.0-patch1 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/hdf5-1.10.0-patch1-ezvtnox35albuaxqryuondweyjgeo6es
+ libdwarf@20160507 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libdwarf-20160507-dtg3tgnp7htccoly26gduqlrgvnwcp5t
+ libdwarf@20160507 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libdwarf-20160507-yfx6p3g3rkmqvcqbmtb34o6pln7pqvcz
+ libelf@0.8.12 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.12-ipggckv6i7h44iryzfa4dwdela32a7fy
+ libelf@0.8.12 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.12-vrv2ttbd34xlfoxy4jwt6qsjrcbalmmw
+ libelf@0.8.13 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/libelf-0.8.13-csrt4qxfkhjgn5xg3zjpkir7xdnszl2a
+ trilinos@12.8.1 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/trilinos-12.8.1-uvd6dfd7x4uyvck4awo3r3frudihn4ar
+
+ -- linux-redhat6-x86_64 / intel@16.0.3 --------------------------
+ libelf@0.8.13 ~/spack/opt/spack/linux-redhat6-x86_64/intel-16.0.3/libelf-0.8.13-7wgp32xksatkvw2tbssmehw2t5tnxndj
+
+
+---------------------
+Customizing Compilers
+---------------------
+
+
+Spack manages a list of available compilers on the system, detected
+automatically from the user's ``PATH`` variable. The ``spack
+compilers`` command is an alias for ``spack compiler list``.
+
+.. code-block:: console
+
+ $ spack compilers
+ ==> Available compilers
+ -- gcc ----------------------------------------------------------
+ gcc@4.4.7
+
+ -- intel --------------------------------------------------------
+ intel@16.0.3 intel@15.0.1 intel@14.0.0 intel@12.1.3 intel@10.0
+ intel@16.0.2 intel@15.0.0 intel@13.1.1 intel@12.1.2 intel@9.1
+ intel@16.0.1 intel@14.0.4 intel@13.1.0 intel@12.1.0
+ intel@16.0.0 intel@14.0.3 intel@13.0.1 intel@12.0.4
+ intel@15.0.4 intel@14.0.2 intel@13.0.0 intel@11.1
+ intel@15.0.3 intel@14.0.1 intel@12.1.5 intel@10.1
+
+ -- pgi ----------------------------------------------------------
+ pgi@16.5-0 pgi@15.7-0 pgi@14.7-0 pgi@13.2-0 pgi@11.10-0 pgi@9.0-4
+ pgi@16.3-0 pgi@15.5-0 pgi@14.3-0 pgi@13.1-1 pgi@11.1-0 pgi@8.0-1
+ pgi@16.1-0 pgi@15.1-0 pgi@13.10-0 pgi@12.8-0 pgi@10.9-0 pgi@7.1-3
+ pgi@15.10-0 pgi@14.10-0 pgi@13.6-0 pgi@12.1-0 pgi@10.2-0 pgi@7.0-6
+
+The compilers are maintained in a YAML file that can be hand-edited
+for special cases. Spack also has tools to add compilers, and
+compilers built with Spack can be added to the configuration.
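+
+A hedged sketch of what an entry in that file (``compilers.yaml``)
+typically looks like; the exact schema varies between Spack versions and
+the paths shown are placeholders:
+
+.. code-block:: yaml
+
+ compilers:
+ - compiler:
+     spec: gcc@6.1.0
+     operating_system: redhat6
+     modules: []
+     paths:
+       cc:  /path/to/gcc-6.1.0/bin/gcc
+       cxx: /path/to/gcc-6.1.0/bin/g++
+       f77: /path/to/gcc-6.1.0/bin/gfortran
+       fc:  /path/to/gcc-6.1.0/bin/gfortran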
+
+.. code-block:: console
+
+ $ spack install --fake gcc@6.1.0
+ ==> Installing gcc
+ ==> gmp is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gmp-6.1.1-uwn4gfdtq3sywy5uf4f7znrh66oybikf
+ ==> Installing isl
+ ==> gmp is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gmp-6.1.1-uwn4gfdtq3sywy5uf4f7znrh66oybikf
+ ==> Building isl [Package]
+ ==> Successfully installed isl
+ Fetch: . Build: 0.19s. Total: 0.19s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/isl-0.14-hs2w7mjjjaakkmbbv5yvfqf7yyzhorl6
+ ==> Installing mpc
+ ==> gmp is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gmp-6.1.1-uwn4gfdtq3sywy5uf4f7znrh66oybikf
+ ==> Installing mpfr
+ ==> gmp is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gmp-6.1.1-uwn4gfdtq3sywy5uf4f7znrh66oybikf
+ ==> Building mpfr [Package]
+ ==> Successfully installed mpfr
+ Fetch: . Build: 0.17s. Total: 0.17s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpfr-3.1.4-7kt5ij437khredfq4bvnyu22t3fmtfvt
+ ==> Building mpc [Package]
+ ==> Successfully installed mpc
+ Fetch: . Build: 0.28s. Total: 0.28s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpc-1.0.3-g5taq6lt3zuy5l2jtggi5lctxnl4la5u
+ ==> Installing binutils
+ ==> m4 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> Installing bison
+ ==> m4 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> Building bison [Package]
+ ==> Successfully installed bison
+ Fetch: . Build: 0.12s. Total: 0.12s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/bison-3.0.4-hkhfysfvq5l6rsns67g2htmkpxauvnwa
+ ==> Installing flex
+ ==> m4 is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/m4-1.4.17-qijdzvhjyybrtwbqm73vykhmkaqro3je
+ ==> bison is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/bison-3.0.4-hkhfysfvq5l6rsns67g2htmkpxauvnwa
+ ==> Building flex [Package]
+ ==> Successfully installed flex
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/flex-2.6.0-qd6d73rdfrozdrsdpimvl4tj7d5ps7qg
+ ==> Building binutils [Package]
+ ==> Successfully installed binutils
+ Fetch: . Build: 0.11s. Total: 0.11s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/binutils-2.27-iq2hry3gvaxszmwwbnll7njgdgaek56o
+ ==> mpfr is already installed in ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/mpfr-3.1.4-7kt5ij437khredfq4bvnyu22t3fmtfvt
+ ==> Building gcc [Package]
+ ==> Successfully installed gcc
+ Fetch: . Build: 0.66s. Total: 0.66s.
+ [+] ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gcc-6.1.0-j5576zbsot2ydljlthjzhsirsesnogvh
+
+
+
+ $ spack find -p gcc
+ ==> 1 installed packages.
+ -- linux-redhat6-x86_64 / gcc@4.4.7 -----------------------------
+ gcc@6.1.0 ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gcc-6.1.0-j5576zbsot2ydljlthjzhsirsesnogvh
+
+
+If we had done a "real" install of ``gcc``, we could add it to our
+configuration now using the ``spack compiler add`` command, but we would
+also be waiting for it to install. If we run the command now, it will
+return no new compilers.
+
+.. code-block:: console
+
+ $ spack compiler add ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gcc-6.1.0-j5576zbsot2ydljlthjzhsirsesnogvh/bin
+ ==> Found no new compilers
+
+If we had done a real install, the output would have been as follows:
+
+.. code-block:: console
+
+ $ spack compiler add ~/spack/opt/spack/linux-redhat6-x86_64/gcc-4.4.7/gcc-6.1.0-j5576zbsot2ydljlthjzhsirsesnogvh/bin
+ ==> Added 1 new compiler to ~/.spack/linux/compilers.yaml
+ gcc@6.1.0
diff --git a/lib/spack/docs/workflows.rst b/lib/spack/docs/workflows.rst
new file mode 100644
index 0000000000..11b77c008d
--- /dev/null
+++ b/lib/spack/docs/workflows.rst
@@ -0,0 +1,1198 @@
+=========
+Workflows
+=========
+
+The process of using Spack involves building packages, running
+binaries from those packages, and developing software that depends on
+those packages. For example, one might use Spack to build the
+``netcdf`` package, use ``spack load`` to run the ``ncdump`` binary, and
+finally, write a small C program to read/write a particular NetCDF file.
+
+Spack supports a variety of workflows to suit a variety of situations
+and user preferences; there is no single way to do all of these things.
+This chapter demonstrates different workflows that have been
+developed, pointing out their pros and cons.
+
+-----------
+Definitions
+-----------
+
+First some basic definitions.
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Package, Concrete Spec, Installed Package
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+In Spack, a package is an abstract recipe to build one piece of software.
+Spack packages may be used to build, in principle, any version of that
+software with any set of variants. Examples of packages include
+``curl`` and ``zlib``.
+
+A package may be *instantiated* to produce a concrete spec: one
+possible realization of a particular package, out of combinatorially
+many other realizations. For example, here is a concrete spec
+instantiated from ``curl``:
+
+.. code-block:: console
+
+ curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
+
+Spack's core concretization algorithm generates concrete specs by
+instantiating packages from its repo, based on a set of "hints",
+including user input and the ``packages.yaml`` file. This algorithm
+may be accessed at any time with the ``spack spec`` command. For
+example:
+
+.. code-block:: console
+
+ $ spack spec curl
+ curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
+
+Every time Spack installs a package, that installation corresponds to
+a concrete spec. Only a vanishingly small fraction of possible
+concrete specs will be installed at any one Spack site.
+
+^^^^^^^^^^^^^^^
+Consistent Sets
+^^^^^^^^^^^^^^^
+
+A set of Spack specs is said to be *consistent* if each package is
+only instantiated one way within it --- that is, if two specs in the
+set have the same package, then they must also have the same version,
+variant, compiler, etc. For example, the following set is consistent:
+
+.. code-block:: console
+
+ curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
+
+The following set is not consistent:
+
+.. code-block:: console
+
+ curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^openssl@system%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ ^zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ zlib@1.2.7%gcc@5.3.0 arch=linux-SuSE11-x86_64
+
+The consistency of a set of installed packages determines what may
+be done with it. It is always possible to ``spack load`` any set of
+installed packages, whether or not they are consistent, and run their
+binaries from the command line. However, a set of installed packages
+can only be linked together in one binary if it is consistent.
+
+If the user produces a series of ``spack spec`` or ``spack load``
+commands, in general there is no guarantee of consistency between
+them. Spack's concretization procedure guarantees that the results of
+any *single* ``spack spec`` call will be consistent. Therefore, the
+best way to ensure a consistent set of specs is to create a Spack
+package with dependencies, and then instantiate that package. We will
+use this technique below.
+
+-----------------
+Building Packages
+-----------------
+
+Suppose you are tasked with installing a set of software packages on a
+system in order to support one application -- both a core application
+program, plus software to prepare input and analyze output. The
+required software might be summed up as a series of ``spack install``
+commands placed in a script. If needed, this script can always be run
+again in the future. For example:
+
+.. code-block:: sh
+
+ #!/bin/sh
+ spack install modele-utils
+ spack install emacs
+ spack install ncview
+ spack install nco
+ spack install modele-control
+ spack install py-numpy
+
+In most cases, this script will not correctly install software
+according to your specific needs: choices may need to be made for
+variants, versions, and virtual dependencies. It
+*is* possible to specify these choices by extending specs on the
+command line; however, the same choices must be specified repeatedly.
+For example, if you wish to use ``openmpi`` to satisfy the ``mpi``
+dependency, then ``^openmpi`` will have to appear on *every* ``spack
+install`` line that uses MPI. It can get repetitive fast.
+
+Customizing Spack installation options is easier to do in the
+``~/.spack/packages.yaml`` file. In this file, you can specify
+preferred versions and variants to use for packages. For example:
+
+.. code-block:: yaml
+
+ packages:
+ python:
+ version: [3.5.1]
+ modele-utils:
+ version: [cmake]
+
+ everytrace:
+ version: [develop]
+ eigen:
+ variants: ~suitesparse
+ netcdf:
+ variants: +mpi
+
+ all:
+ compiler: [gcc@5.3.0]
+ providers:
+ mpi: [openmpi]
+ blas: [openblas]
+ lapack: [openblas]
+
+
+This approach will work as long as you are building packages for just
+one application.
+
+^^^^^^^^^^^^^^^^^^^^^
+Multiple Applications
+^^^^^^^^^^^^^^^^^^^^^
+
+Suppose instead you're building multiple inconsistent applications.
+For example, users want package A to be built with ``openmpi`` and
+package B with ``mpich`` --- but still share many other lower-level
+dependencies. In this case, a single ``packages.yaml`` file will not
+work. Plans are to implement *per-project* ``packages.yaml`` files.
+In the meantime, one could write shell scripts to switch
+``packages.yaml`` between multiple versions as needed, using symlinks.
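+
+A hedged sketch of such a switching script (the file names are
+assumptions):
+
+.. code-block:: sh
+
+ #!/bin/sh
+ # Point ~/.spack/packages.yaml at a per-project variant via a symlink.
+ # Usage: ./use-project.sh projectA
+ set -e
+ project="$1"
+ cd ~/.spack
+ rm -f packages.yaml
+ ln -s "packages-$project.yaml" packages.yaml
+ echo "packages.yaml now points at packages-$project.yaml"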
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Combinatorial Sets of Installs
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Suppose that you are now tasked with systematically building many
+incompatible versions of packages. For example, you need to build
+``petsc`` 9 times for 3 different MPI implementations on 3 different
+compilers, in order to support user needs. In this case, you will
+need to either create 9 different ``packages.yaml`` files or, more
+likely, write 9 different ``spack install`` command lines with the
+correct options in the spec. Here is a real-life example of this kind
+of usage:
+
+.. code-block:: sh
+
+ #!/bin/sh
+
+ compilers=(
+ %gcc
+ %intel
+ %pgi
+ )
+
+ mpis=(
+ openmpi+psm~verbs
+ openmpi~psm+verbs
+ mvapich2+psm~mrail
+ mvapich2~psm+mrail
+ mpich+verbs
+ )
+
+ for compiler in "${compilers[@]}"
+ do
+ # Serial installs
+ spack install szip $compiler
+ spack install hdf $compiler
+ spack install hdf5 $compiler
+ spack install netcdf $compiler
+ spack install netcdf-fortran $compiler
+ spack install ncview $compiler
+
+ # Parallel installs
+ for mpi in "${mpis[@]}"
+ do
+ spack install $mpi $compiler
+ spack install hdf5~cxx+mpi $compiler ^$mpi
+ spack install parallel-netcdf $compiler ^$mpi
+ done
+ done
+
+------------------------------
+Running Binaries from Packages
+------------------------------
+
+Once Spack packages have been built, the next step is to use them. As
+with building packages, there are many ways to use them, depending on
+the use case.
+
+^^^^^^^^^^^^
+Find and Run
+^^^^^^^^^^^^
+
+The simplest way to run a Spack binary is to find it and run it!
+In many cases, nothing more is needed because Spack builds binaries
+with RPATHs. Spack installation directories may be found with ``spack
+location --install-dir`` command. For example:
+
+.. code-block:: console
+
+ $ spack location --install-dir cmake
+ ~/spack/opt/spack/linux-SuSE11-x86_64/gcc-5.3.0/cmake-3.6.0-7cxrynb6esss6jognj23ak55fgxkwtx7
+
+This gives the root of the Spack package; relevant binaries may be
+found within it. For example:
+
+.. code-block:: console
+
+ $ CMAKE=`spack location --install-dir cmake`/bin/cmake
+
+
+Standard UNIX tools can find binaries as well. For example:
+
+.. code-block:: console
+
+ $ find ~/spack/opt -name cmake | grep bin
+ ~/spack/opt/spack/linux-SuSE11-x86_64/gcc-5.3.0/cmake-3.6.0-7cxrynb6esss6jognj23ak55fgxkwtx7/bin/cmake
+
+These methods are suitable, for example, for setting up build
+processes or GUIs that need to know the location of particular tools.
+However, other more powerful methods are generally preferred for user
+environments.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^
+Spack-Generated Modules
+^^^^^^^^^^^^^^^^^^^^^^^
+
+Suppose that Spack has been used to install a set of command-line
+programs, which users now wish to use. One can in principle put a
+number of ``spack load`` commands into ``.bashrc``, for example, to
+load a set of Spack-generated modules:
+
+.. code-block:: sh
+
+ spack load modele-utils
+ spack load emacs
+ spack load ncview
+ spack load nco
+ spack load modele-control
+
+Although simple load scripts like this are useful in many cases, they
+have some drawbacks:
+
+1. The set of modules loaded by them will in general not be
+ consistent. They are a decent way to load commands to be called
+ from command shells. See below for better ways to assemble a
+ consistent set of packages for building application programs.
+
+2. The ``spack spec`` and ``spack install`` commands use a
+ sophisticated concretization algorithm that chooses the "best"
+   among several options, taking into account the ``packages.yaml`` file.
+ The ``spack load`` and ``spack module loads`` commands, on the
+ other hand, are not very smart: if the user-supplied spec matches
+ more than one installed package, then ``spack module loads`` will
+ fail. This may change in the future. For now, the workaround is to
+ be more specific on any ``spack module loads`` lines that fail.
+
+
+""""""""""""""""""""""
+Generated Load Scripts
+""""""""""""""""""""""
+
+Another problem with using ``spack load`` is that it is slow; a typical
+user environment could take several seconds to load and would not be
+appropriate to put into ``.bashrc`` directly. It is preferable to use
+a series of ``spack module loads`` commands to pre-compute which
+modules to load. These can be put in a script that is run whenever
+installed Spack packages change. For example:
+
+.. code-block:: sh
+
+ #!/bin/sh
+ #
+ # Generate module load commands in ~/env/spackenv
+
+ cat <<EOF | /bin/sh >$HOME/env/spackenv
+ FIND='spack module loads --prefix linux-SuSE11-x86_64/'
+
+ \$FIND modele-utils
+ \$FIND emacs
+ \$FIND ncview
+ \$FIND nco
+ \$FIND modele-control
+ EOF
+
+The output of this script is written to ``~/env/spackenv``:
+
+.. code-block:: sh
+
+ # binutils@2.25%gcc@5.3.0+gold~krellpatch~libiberty arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/binutils-2.25-gcc-5.3.0-6w5d2t4
+ # python@2.7.12%gcc@5.3.0~tk~ucs4 arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/python-2.7.12-gcc-5.3.0-2azoju2
+ # ncview@2.1.7%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/ncview-2.1.7-gcc-5.3.0-uw3knq2
+ # nco@4.5.5%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/nco-4.5.5-gcc-5.3.0-7aqmimu
+ # modele-control@develop%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/modele-control-develop-gcc-5.3.0-7rddsij
+ # zlib@1.2.8%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/zlib-1.2.8-gcc-5.3.0-fe5onbi
+ # curl@7.50.1%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/curl-7.50.1-gcc-5.3.0-4vlev55
+ # hdf5@1.10.0-patch1%gcc@5.3.0+cxx~debug+fortran+mpi+shared~szip~threadsafe arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/hdf5-1.10.0-patch1-gcc-5.3.0-pwnsr4w
+ # netcdf@4.4.1%gcc@5.3.0~hdf4+mpi arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/netcdf-4.4.1-gcc-5.3.0-rl5canv
+ # netcdf-fortran@4.4.4%gcc@5.3.0 arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/netcdf-fortran-4.4.4-gcc-5.3.0-stdk2xq
+ # modele-utils@cmake%gcc@5.3.0+aux+diags+ic arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/modele-utils-cmake-gcc-5.3.0-idyjul5
+ # everytrace@develop%gcc@5.3.0+fortran+mpi arch=linux-SuSE11-x86_64
+ module load linux-SuSE11-x86_64/everytrace-develop-gcc-5.3.0-p5wmb25
+
+Users may now put ``source ~/env/spackenv`` into ``.bashrc``.
+
+.. note::
+
+ Some module systems put a prefix on the names of modules created
+ by Spack. For example, that prefix is ``linux-SuSE11-x86_64/`` in
+ the above case. If a prefix is not needed, you may omit the
+ ``--prefix`` flag from ``spack module loads``.
+
+
+"""""""""""""""""""""""
+Transitive Dependencies
+"""""""""""""""""""""""
+
+In the script above, each ``spack module loads`` command generates a
+*single* ``module load`` line. Transitive dependencies do not usually
+need to be loaded; only the modules the user needs in ``$PATH`` do. This is
+because Spack builds binaries with RPATH. Spack's RPATH policy has
+some nice features:
+
+#. Modules for multiple inconsistent applications may be loaded
+   simultaneously. In the above example (Multiple Applications),
+   package A and package B can coexist in the user's ``$PATH``,
+   even though they use different MPIs.
+
+#. RPATH eliminates a whole class of strange errors that can happen
+ in non-RPATH binaries when the wrong ``LD_LIBRARY_PATH`` is
+ loaded.
+
+#. Recursive module systems such as LMod are not necessary.
+
+#. Modules are not needed at all to execute binaries. If a path to a
+ binary is known, it may be executed. For example, the path for a
+ Spack-built compiler can be given to an IDE without requiring the
+ IDE to load that compiler's module.
+
+Unfortunately, Spack's RPATH support does not work in all cases. For example:
+
+#. Software comes in many forms --- not just compiled ELF binaries,
+ but also as interpreted code in Python, R, JVM bytecode, etc.
+ Those systems almost universally use an environment variable
+ analogous to ``LD_LIBRARY_PATH`` to dynamically load libraries.
+
+#. Although Spack generally builds binaries with RPATH, it does not
+ currently do so for compiled Python extensions (for example,
+ ``py-numpy``). Any libraries that these extensions depend on
+ (``blas`` in this case, for example) must be specified in the
+   ``LD_LIBRARY_PATH``.
+
+#. In some cases, Spack-generated binaries end up without a
+ functional RPATH for no discernible reason.
+
+In cases where RPATH support doesn't make things "just work," it can
+be necessary to load a module's dependencies as well as the module
+itself. This is done by adding the ``--dependencies`` flag to the
+``spack module loads`` command. For example, the following line,
+added to the script above, would be used to load SciPy, along with
+Numpy, core Python, BLAS/LAPACK and anything else needed:
+
+.. code-block:: sh
+
+ spack module loads --dependencies py-scipy
+
+^^^^^^^^^^^^^^^^^^
+Extension Packages
+^^^^^^^^^^^^^^^^^^
+
+:ref:`packaging_extensions` may be used as an alternative to loading
+Python (and similar systems) packages directly. If extensions are
+activated, then ``spack load python`` will also load all the
+extensions activated for the given ``python``. This reduces the need
+for users to load a large number of modules.
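+
+For example, assuming ``py-numpy`` has been installed as an extension of
+the same ``python``, the following is roughly all a user would need (no
+output is shown because it depends on the local installation):
+
+.. code-block:: console
+
+ $ spack activate py-numpy
+ $ spack load python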
+
+However, Spack extensions have two potential drawbacks:
+
+#. Activated packages that involve compiled C extensions may still
+ need their dependencies to be loaded manually. For example,
+ ``spack load openblas`` might be required to make ``py-numpy``
+ work.
+
+#. Extensions "break" a core feature of Spack, which is that multiple
+ versions of a package can co-exist side-by-side. For example,
+ suppose you wish to run a Python package in two different
+ environments but the same basic Python --- one with
+ ``py-numpy@1.7`` and one with ``py-numpy@1.8``. Spack extensions
+ will not support this potential debugging use case.
+
+
+^^^^^^^^^^^^^^
+Dummy Packages
+^^^^^^^^^^^^^^
+
+As an alternative to a series of ``module load`` commands, one might
+consider dummy packages as a way to create a *consistent* set of
+packages that may be loaded as one unit. The idea here is pretty
+simple:
+
+#. Create a package (say, ``mydummy``) with no URL and no
+   ``install()`` method, just dependencies (see the sketch after this list).
+
+#. Run ``spack install mydummy`` to install.
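+
+A hedged sketch of what such a recipe might look like (the dependency
+list is illustrative, and depending on the Spack version a trivial
+``install()`` that just creates something under the prefix may still be
+required):
+
+.. code-block:: python
+
+ from spack import *
+
+ class Mydummy(Package):
+     """Meta-package whose only job is to pin a consistent set of specs."""
+
+     # There is no real source to download; these are placeholders.
+     homepage = "http://www.example.com"
+     version('1.0')
+
+     depends_on('hdf5+mpi')
+     depends_on('netcdf')
+     depends_on('py-numpy')
+
+     # May be unnecessary on some Spack versions; kept minimal here.
+     def install(self, spec, prefix):
+         mkdirp(prefix.lib)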
+
+An advantage of this method is that the set of packages produced will be
+consistent. This means that you can reliably build software against
+it. A disadvantage is that the set of packages will be consistent; this
+means you cannot load up two applications this way if they are not
+consistent with each other.
+
+^^^^^^^^^^^^^^^^
+Filesystem Views
+^^^^^^^^^^^^^^^^
+
+Filesystem views offer an alternative to environment modules: another
+way to assemble packages usefully and load them into a user's
+environment.
+
+A filesystem view is a single directory tree that is the union of the
+directory hierarchies of a number of installed packages; it is similar
+to the directory hierarchy that might exist under ``/usr/local``. The
+files of the view's installed packages are brought into the view by
+symbolic or hard links, referencing the original Spack installation.
+
+When software is built and installed, absolute paths are frequently
+"baked into" the software, making it non-relocatable. This happens
+not just in RPATHs, but also in shebangs, configuration files, and
+assorted other locations.
+
+Therefore, programs run out of a Spack view will typically still look
+in the original Spack-installed location for shared libraries and
+other resources. This behavior is not easily changed; in general,
+there is no way to know where absolute paths might be written into an
+installed package, and how to relocate it. Therefore, the original
+Spack tree must be kept in place for a filesystem view to work, even
+if the view is built with hardlinks.
+
+.. FIXME: reference the relocation work of Hegner and Gartung (PR #1013)
+
+
+""""""""""""""""""""""
+Using Filesystem Views
+""""""""""""""""""""""
+
+A filesystem view is created, and packages are linked in, by the ``spack
+view`` command's ``symlink`` and ``hardlink`` sub-commands. The
+``spack view remove`` command can be used to unlink some or all of the
+filesystem view.
+
+The following example creates a filesystem view based
+on an installed ``cmake`` package and then removes from the view the
+files in the ``cmake`` package while retaining its dependencies.
+
+.. code-block:: console
+
+ $ spack view --verbose symlink myview cmake@3.5.2
+ ==> Linking package: "ncurses"
+ ==> Linking package: "zlib"
+ ==> Linking package: "openssl"
+ ==> Linking package: "cmake"
+
+ $ ls myview/
+ bin doc etc include lib share
+
+ $ ls myview/bin/
+ captoinfo clear cpack ctest infotocap openssl tabs toe tset
+ ccmake cmake c_rehash infocmp ncurses6-config reset tic tput
+
+ $ spack view --verbose --dependencies false rm myview cmake@3.5.2
+ ==> Removing package: "cmake"
+
+ $ ls myview/bin/
+ captoinfo c_rehash infotocap openssl tabs toe tset
+ clear infocmp ncurses6-config reset tic tput
+
+.. note::
+
+ If the set of packages being included in a view is inconsistent,
+ then it is possible that two packages will provide the same file. Any
+ conflicts of this type are handled on a first-come-first-served basis,
+ and a warning is printed.
+
+.. note::
+
+ When packages are removed from a view, empty directories are
+ purged.
+
+""""""""""""""""""
+Fine-Grain Control
+""""""""""""""""""
+
+The ``--exclude`` and ``--dependencies`` option flags allow for
+fine-grained control over which packages and dependencies do or do
+not get included in a view. For example, suppose you are developing the
+``appsy`` package. You wish to build against a view of all ``appsy``
+dependencies, but not ``appsy`` itself:
+
+.. code-block:: console
+
+ $ spack view symlink --dependencies yes --exclude appsy appsy
+
+Alternatively, suppose you wish to create a view whose purpose is
+to provide binary executables to end users. You only need to include
+applications they might want, and not those applications'
+dependencies. In this case, you might use:
+
+.. code-block:: console
+
+ $ spack view symlink --dependencies no cmake
+
+
+"""""""""""""""""""""""
+Hybrid Filesystem Views
+"""""""""""""""""""""""
+
+Although filesystem views are usually created by Spack, users are free
+to add to them by other means. For example, imagine a filesystem
+view, created by Spack, that looks something like:
+
+.. code-block:: console
+
+ /path/to/MYVIEW/bin/programA -> /path/to/spack/.../bin/programA
+ /path/to/MYVIEW/lib/libA.so -> /path/to/spack/.../lib/libA.so
+
+The user may then add to this view by non-Spack means; for example,
+by running a classic install script:
+
+.. code-block:: console
+
+ $ tar -xf B.tar.gz
+ $ cd B/
+ $ ./configure --prefix=/path/to/MYVIEW \
+ --with-A=/path/to/MYVIEW
+ $ make && make install
+
+The result is a hybrid view:
+
+.. code-block:: console
+
+ /path/to/MYVIEW/bin/programA -> /path/to/spack/.../bin/programA
+ /path/to/MYVIEW/bin/programB
+ /path/to/MYVIEW/lib/libA.so -> /path/to/spack/.../lib/libA.so
+ /path/to/MYVIEW/lib/libB.so
+
+In this case, real files coexist, interleaved with the "view"
+symlinks. At any time one can delete ``/path/to/MYVIEW`` or use
+``spack view`` to manage it surgically. None of this will affect the
+real Spack install area.
+
+
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+Discussion: Running Binaries
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+Modules, extension packages and filesystem views are all ways to
+assemble sets of Spack packages into a useful environment. They are
+all semantically similar, in that conflicting installed packages
+cannot simultaneously be loaded, activated or included in a view.
+
+With all of these approaches, there is no guarantee that the
+environment created will be consistent. It is possible, for example,
+to simultaneously load application A that uses OpenMPI and application
+B that uses MPICH. Both applications will run just fine in this
+inconsistent environment because they rely on RPATHs, not the
+environment, to find their dependencies.
+
+In general, environments set up using modules vs. views will work
+similarly. Both can be used to set up ephemeral or long-lived
+testing/development environments. Operational differences between the
+two approaches can make one or the other preferable in certain
+environments:
+
+* Filesystem views do not require environment module infrastructure.
+ Although Spack can install ``environment-modules``, users might be
+ hostile to its use. Filesystem views offer a good solution for
+ sysadmins serving users who just "want all the stuff I need in one
+ place" and don't want to hear about Spack.
+
+* Although modern build systems will find dependencies wherever they
+  might be, some applications with hand-built makefiles expect their
+ dependencies to be in one place. One common problem is makefiles
+ that assume that ``netcdf`` and ``netcdf-fortran`` are installed in
+ the same tree. Or, one might use an IDE that requires tedious
+ configuration of dependency paths; and it's easier to automate that
+ administration in a view-building script than in the IDE itself.
+ For all these cases, a view will be preferable to other ways to
+ assemble an environment.
+
+* On systems with inode quotas, modules might be preferable to views
+ and extension packages.
+
+* Views and activated extensions maintain state that is semantically
+ equivalent to the information in a ``spack module loads`` script.
+ Administrators might find things easier to maintain without the
+ added "heavyweight" state of a view.
+
+------------------------------
+Developing Software with Spack
+------------------------------
+
+For any software project, one needs to assemble an environment of
+that project's dependencies. You might consider
+loading a series of modules or creating a filesystem view. This
+approach, while obvious, has some serious drawbacks:
+
+1. There is no guarantee that an environment created this way will be
+ consistent. Your application could end up with dependency A
+ expecting one version of MPI, and dependency B expecting another.
+ The linker will not be happy...
+
+2. Suppose you need to debug a package deep within your software DAG.
+ If you build that package with a manual environment, then it
+ becomes difficult to have Spack auto-build things that depend on
+ it. That could be a serious problem, depending on how deep the
+ package in question is in your dependency DAG.
+
+3. At its core, Spack is a sophisticated concretization algorithm that
+ matches up packages with appropriate dependencies and creates a
+ *consistent* environment for the package it's building. Writing a
+ list of ``spack load`` commands for your dependencies is at least
+ as hard as writing the same list of ``depends_on()`` declarations
+ in a Spack package. But it makes no use of Spack concretization
+ and is more error-prone.
+
+4. Spack provides an automated, systematic way not just to find a
+   package's dependencies --- but also to build other packages on
+ top. Any Spack package can become a dependency for another Spack
+ package, offering a powerful vision of software re-use. If you
+ build your package A outside of Spack, then your ability to use it
+ as a building block for other packages in an automated way is
+ diminished: other packages depending on package A will not
+ be able to use Spack to fulfill that dependency.
+
+5. If you are reading this manual, you probably love Spack. You're
+ probably going to write a Spack package for your software so
+ prospective users can install it with the least amount of pain.
+ Why should you go to additional work to find dependencies in your
+ development environment? Shouldn't Spack be able to help you build
+ your software based on the package you've already written?
+
+In this section, we show how Spack can be used in the software
+development process to greatest effect, and how development packages
+can be seamlessly integrated into the Spack ecosystem. We will show
+how this process works by example, assuming the software you are
+creating is called ``mylib``.
+
+^^^^^^^^^^^^^^^^^^^^^
+Write the CMake Build
+^^^^^^^^^^^^^^^^^^^^^
+
+For now, the techniques in this section only work for CMake-based
+projects, although they could be easily extended to other build
+systems in the future. We will therefore assume you are using CMake
+to build your project.
+
+The ``CMakeLists.txt`` file should be written as normal. A few caveats:
+
+1. Your project should produce binaries with RPATHs. This will ensure
+ that they work the same whether built manually or automatically by
+ Spack. For example:
+
+.. code-block:: cmake
+
+ # enable @rpath in the install name for any shared library being built
+ # note: it is planned that a future version of CMake will enable this by default
+ set(CMAKE_MACOSX_RPATH 1)
+
+ # Always use full RPATH
+ # http://www.cmake.org/Wiki/CMake_RPATH_handling
+ # http://www.kitware.com/blog/home/post/510
+
+ # use, i.e. don't skip the full RPATH for the build tree
+ SET(CMAKE_SKIP_BUILD_RPATH FALSE)
+
+ # when building, don't use the install RPATH already
+ # (but later on when installing)
+ SET(CMAKE_BUILD_WITH_INSTALL_RPATH FALSE)
+
+ # add the automatically determined parts of the RPATH
+ # which point to directories outside the build tree to the install RPATH
+ SET(CMAKE_INSTALL_RPATH_USE_LINK_PATH TRUE)
+
+ # the RPATH to be used when installing, but only if it's not a system directory
+ LIST(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES "${CMAKE_INSTALL_PREFIX}/lib" isSystemDir)
+ IF("${isSystemDir}" STREQUAL "-1")
+ SET(CMAKE_INSTALL_RPATH "${CMAKE_INSTALL_PREFIX}/lib")
+ ENDIF("${isSystemDir}" STREQUAL "-1")
+
+
+2. Spack provides a CMake variable called
+ ``SPACK_TRANSITIVE_INCLUDE_PATH``, which contains the ``include/``
+ directory for all of your project's transitive dependencies. It
+ can be useful if your project ``#include``s files from package B,
+ which ``#include`` files from package C, but your project only
+   lists package B as a dependency. This works in traditional
+ single-tree build environments, in which B and C's include files
+ live in the same place. In order to make it work with Spack as
+ well, you must add the following to ``CMakeLists.txt``. It will
+ have no effect when building without Spack:
+
+ .. code-block:: cmake
+
+ # Include all the transitive dependencies determined by Spack.
+ # If we're not running with Spack, this does nothing...
+ include_directories($ENV{SPACK_TRANSITIVE_INCLUDE_PATH})
+
+ .. note::
+
+      This feature is controversial and could break with
+ future versions of GNU ld. The best practice is to make sure
+ anything you ``#include`` is listed as a dependency in your
+ CMakeLists.txt (and Spack package).
+
+.. _write-the-spack-package:
+
+^^^^^^^^^^^^^^^^^^^^^^^
+Write the Spack Package
+^^^^^^^^^^^^^^^^^^^^^^^
+
+The Spack package also needs to be written, in tandem with setting up
+the build (for example, CMake). The most important part of this task
+is declaring dependencies. Here is an example of the Spack package
+for the ``mylib`` package (ellipses for brevity):
+
+.. code-block:: python
+
+ class Mylib(CMakePackage):
+ """Misc. reusable utilities used by Myapp."""
+
+ homepage = "https://github.com/citibeth/mylib"
+ url = "https://github.com/citibeth/mylib/tarball/123"
+
+ version('0.1.2', '3a6acd70085e25f81b63a7e96c504ef9')
+ version('develop', git='https://github.com/citibeth/mylib.git',
+ branch='develop')
+
+ variant('everytrace', default=False,
+ description='Report errors through Everytrace')
+ ...
+
+ extends('python')
+
+ depends_on('eigen')
+ depends_on('everytrace', when='+everytrace')
+ depends_on('proj', when='+proj')
+ ...
+ depends_on('cmake', type='build')
+ depends_on('doxygen', type='build')
+
+ def configure_args(self):
+ spec = self.spec
+ return [
+ '-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
+ '-DUSE_PROJ4=%s' % ('YES' if '+proj' in spec else 'NO'),
+ ...
+ '-DUSE_UDUNITS2=%s' % ('YES' if '+udunits2' in spec else 'NO'),
+ '-DUSE_GTEST=%s' % ('YES' if '+googletest' in spec else 'NO')]
+
+This is a standard Spack package that can be used to install
+``mylib`` in a production environment. The list of dependencies in
+the Spack package will generally be a repeat of the list of CMake
+dependencies. This package also has some features that allow it to be
+used for development:
+
+1. It subclasses ``CMakePackage`` instead of ``Package``. This
+ eliminates the need to write an ``install()`` method, which is
+ defined in the superclass. Instead, one just needs to write the
+ ``configure_args()`` method. That method should return the
+ arguments needed for the ``cmake`` command (beyond the standard
+ CMake arguments, which Spack will include already). These
+ arguments are typically used to turn features on/off in the build.
+
+2. It specifies a non-checksummed version ``develop``. Running
+   ``spack install mylib@develop`` will install the latest code from
+   the ``develop`` branch. This method of
+ download is useful for the developer of a project while it is in
+ active development; however, it should only be used by developers
+ who control and trust the repository in question!
+
+3. The ``url``, ``url_for_version()`` and ``homepage`` attributes are
+ not used in development. Don't worry if you don't have any, or if
+ they are behind a firewall.
+
+^^^^^^^^^^^^^^^^
+Build with Spack
+^^^^^^^^^^^^^^^^
+
+Now that you have a Spack package, you can use Spack to find its
+dependencies automatically. For example:
+
+.. code-block:: console
+
+ $ cd mylib
+ $ spack setup mylib@local
+
+The result will be a file ``spconfig.py`` in the top-level
+``mylib/`` directory. It is a short script that calls CMake with the
+dependencies and options determined by Spack --- similar to what
+happens in ``spack install``, but now written out in script form.
+From a developer's point of view, you can think of ``spconfig.py`` as
+a stand-in for the ``cmake`` command.
+
+.. note::
+
+ You can invent any "version" you like for the ``spack setup``
+ command.
+
+.. note::
+
+   Although ``spack setup`` does not build your package, it does
+   create and install a module file, and it marks in the database
+   that your package has been installed. This can lead to errors, of
+ course, if you don't subsequently install your package.
+ Also... you will need to ``spack uninstall`` before you run
+ ``spack setup`` again.
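+
+   For example, using the spec from above:
+
+   .. code-block:: console
+
+      $ spack uninstall mylib@local
+      $ spack setup mylib@local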
+
+
+You can now build your project as usual with CMake:
+
+.. code-block:: console
+
+ $ mkdir build; cd build
+ $ ../spconfig.py .. # Instead of cmake ..
+ $ make
+ $ make install
+
+Once your ``make install`` command is complete, your package will be
+installed, just as if you'd run ``spack install`` --- except you can
+now edit, re-build and re-install as often as needed, without
+committing to Git or downloading tarballs.
+
+.. note::
+
+ The build you get this way will be *almost* the same as the build
+   from ``spack install``. The only difference is that you will not be
+ using Spack's compiler wrappers. This difference has not caused
+ problems in our experience, as long as your project sets
+ RPATHs as shown above. You DO use RPATHs, right?
+
+^^^^^^^^^^^^^^^^^^^^
+Build Other Software
+^^^^^^^^^^^^^^^^^^^^
+
+Now that you've built ``mylib`` with Spack, you might want to build
+another package that depends on it --- for example, ``myapp``. This
+is accomplished easily enough:
+
+.. code-block:: console
+
+ $ spack install myapp ^mylib@local
+
+Note that auto-built software has now been installed *on top of*
+manually-built software, without breaking Spack's "web." This
+property is useful if you need to debug a package deep in the
+dependency hierarchy of your application. It is a *big* advantage of
+using ``spack setup`` to build your package's environment.
+
+If you feel your software is stable, you might wish to install it with
+``spack install`` and skip the source directory. You can just use,
+for example:
+
+.. code-block:: console
+
+ $ spack install mylib@develop
+
+.. _release-your-software:
+
+^^^^^^^^^^^^^^^^^^^^^
+Release Your Software
+^^^^^^^^^^^^^^^^^^^^^
+
+You are now ready to release your software as a tarball with a
+numbered version, and a Spack package that can build it. If you're
+hosted on GitHub, this process will be a bit easier.
+
+#. Put tag(s) on the version(s) in your GitHub repo you want to be
+ release versions. For example, a tag ``v0.1.0`` for version 0.1.0.
+
+#. Set the ``url`` in your ``package.py`` to download a tarball for
+ the appropriate version. GitHub will give you a tarball for any
+ commit in the repo, if you tickle it the right way. For example:
+
+ .. code-block:: python
+
+ url = 'https://github.com/citibeth/mylib/tarball/v0.1.2'
+
+#. Use Spack to determine your version's hash, and cut'n'paste it into
+ your ``package.py``:
+
+ .. code-block:: console
+
+ $ spack checksum mylib 0.1.2
+ ==> Found 1 versions of mylib
+ 0.1.2 https://github.com/citibeth/mylib/tarball/v0.1.2
+
+ How many would you like to checksum? (default is 5, q to abort)
+ ==> Downloading...
+ ==> Trying to fetch from https://github.com/citibeth/mylib/tarball/v0.1.2
+ ######################################################################## 100.0%
+ ==> Checksummed new versions of mylib:
+ version('0.1.2', '3a6acd70085e25f81b63a7e96c504ef9')
+
+#. You should now be able to install released version 0.1.2 of your package with:
+
+ .. code-block:: console
+
+ $ spack install mylib@0.1.2
+
+#. There is no need to remove the ``develop`` version from your
+   package. Spack concretization will always prefer numbered versions
+   to non-numeric versions. Users will only get the ``develop``
+   version if they ask for it.
+
+^^^^^^^^^^^^^^^^^^^^^^^^
+Distribute Your Software
+^^^^^^^^^^^^^^^^^^^^^^^^
+
+Once you've released your software, other people will want to build
+it, and you will need to tell them how. In the past, that has meant a
+few paragraphs of prose explaining which dependencies to install.
+Now that you use Spack, those instructions are captured in executable
+Python code, and Spack is the best way to install your software with
+all of its dependencies:
+
+#. First, you will want to fork Spack's ``develop`` branch. Your aim
+ is to provide a stable version of Spack that you KNOW will install
+ your software. If you make changes to Spack in the process, you
+ will want to submit pull requests to Spack core.
+
+#. Add your software's ``package.py`` to that fork. You should submit
+ a pull request for this as well, unless you don't want the public
+ to know about your software.
+
+#. Prepare instructions that read approximately as follows:
+
+ #. Download Spack from your forked repo.
+
+ #. Install Spack; see :ref:`getting_started`.
+
+ #. Set up an appropriate ``packages.yaml`` file. You should tell
+ your users to include in this file whatever versions/variants
+ are needed to make your software work correctly (assuming those
+ are not already in your ``packages.yaml``).
+
+ #. Run ``spack install mylib``.
+
+   #. Run this script to generate the ``module load`` commands or
+      filesystem view needed to use this software (a sketch of such a
+      script appears after this list).
+
+#. Be aware that your users might encounter unexpected bootstrapping
+ issues on their machines, especially if they are running on older
+ systems. The :ref:`getting_started` section should cover this, but
+ there could always be issues.
+
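+The helper script mentioned in the instructions above can be very
+simple. A sketch (the package name, output file, and view path are
+illustrative only):
+
+.. code-block:: console
+
+   $ spack install mylib
+
+   # Option A: write the module load command(s) to a file
+   $ spack module loads mylib > mylib-env.sh
+
+   # Option B: assemble a filesystem view instead
+   $ spack view symlink /path/to/mylib-view mylib
+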
+^^^^^^^^^^^^^^^^^^^
+Other Build Systems
+^^^^^^^^^^^^^^^^^^^
+
+``spack setup`` currently only supports CMake-based builds, in
+packages that subclass ``CMakePackage``. The intent is that this
+mechanism should support a wider range of build systems; for example,
+GNU Autotools. Someone well-versed in Autotools is needed to develop
+this patch and test it out.
+
+Python Distutils is another popular build system that should get
+``spack setup`` support. For non-compiled languages like Python,
+``spack diy`` may be used. Even better is to put the source directory
+directly in the user's ``PYTHONPATH``. Then, edits in source files
+are immediately available to run without any install process at all!
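+
+For example, for a pure-Python project (the source path here is
+hypothetical):
+
+.. code-block:: console
+
+   $ export PYTHONPATH=$HOME/src/mylib:$PYTHONPATH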
+
+^^^^^^^^^^
+Conclusion
+^^^^^^^^^^
+
+The ``spack setup`` development workflow provides better automation,
+flexibility and safety than workflows relying on environment modules
+or filesystem views. However, it has some drawbacks:
+
+#. It currently works only with projects that use the CMake build
+   system. Support for other build systems is not hard to add, but
+ will require a small amount of effort for each build system to be
+ supported. It might not work well with some IDEs.
+
+#. It only works with packages that subclass ``StagedPackage``.
+ Currently, most Spack packages do not. Converting them is not
+ hard; but must be done on a package-by-package basis.
+
+#. It requires that users are comfortable with Spack, as they
+ integrate Spack explicitly in their workflow. Not all users are
+ willing to do this.
+
+------------------
+Upstream Bug Fixes
+------------------
+
+It is not uncommon to discover a bug in an upstream project while
+trying to build with Spack. Typically, the bug is in a package that
+serves as a dependency of something else. This section describes
+procedures to work around and ultimately resolve these bugs without
+delaying the Spack user's main goal.
+
+^^^^^^^^^^^^^^^^^
+Buggy New Version
+^^^^^^^^^^^^^^^^^
+
+Sometimes, the old version of a package works fine, but a new version
+is buggy. For example, it was once found that `Adios did not build
+with hdf5@1.10 <https://github.com/LLNL/spack/issues/1683>`_. If the
+old version of ``hdf5`` will work with ``adios``, the suggested
+procedure is:
+
+#. Restrict ``adios`` to the old version of ``hdf5`` by adding the
+   following to ``adios/package.py``:
+
+ .. code-block:: python
+
+ # Adios does not build with HDF5 1.10
+ # See: https://github.com/LLNL/spack/issues/1683
+ depends_on('hdf5@:1.9')
+
+#. Determine whether the problem is with ``hdf5`` or ``adios``, and
+ report the problem to the appropriate upstream project. In this
+ case, the problem was with ``adios``.
+
+#. Once a new version of ``adios`` comes out with the bugfix, modify
+ ``adios/package.py`` to reflect it:
+
+ .. code-block:: python
+
+ # Adios up to v1.10.0 does not build with HDF5 1.10
+ # See: https://github.com/LLNL/spack/issues/1683
+ depends_on('hdf5@:1.9', when='@:1.10.0')
+ depends_on('hdf5', when='@1.10.1:')
+
+^^^^^^^^^^^^^^^^
+No Version Works
+^^^^^^^^^^^^^^^^
+
+Sometimes, *no* existing versions of a dependency work for a build.
+This typically happens when developing a new project: only then does
+the developer notice that existing versions of a dependency are all
+buggy, or the non-buggy versions are all missing a critical feature.
+
+In the long run, the upstream project will hopefully fix the bug and
+release a new version. But that could take a while, even if a bugfix
+has already been pushed to the project's repository. In the meantime,
+the Spack user needs things to work.
+
+The solution is to create an unofficial Spack release of the project,
+as soon as the bug is fixed in *some* repository. A study of the `Git
+history <https://github.com/citibeth/spack/commits/efischer/develop/var/spack/repos/builtin/packages/py-proj/package.py>`_
+of ``py-proj/package.py`` is instructive here:
+
+#. On `April 1 <https://github.com/citibeth/spack/commit/44a1d6a96706affe6ef0a11c3a780b91d21d105a>`_, an initial bugfix was identified for the PyProj project
+ and a pull request submitted to PyProj. Because the upstream
+ authors had not yet fixed the bug, the ``py-proj`` Spack package
+ downloads from a forked repository, set up by the package's author.
+ A non-numeric version number is used to make it easy to upgrade the
+ package without recomputing checksums; however, this is an
+ untrusted download method and should not be distributed. The
+ package author has now become, temporarily, a maintainer of the
+ upstream project:
+
+ .. code-block:: python
+
+ # We need the benefits of this PR
+ # https://github.com/jswhit/pyproj/pull/54
+ version('citibeth-latlong2',
+ git='https://github.com/citibeth/pyproj.git',
+ branch='latlong2')
+
+
+#. By May 14, the upstream project had accepted a pull request with
+ the required bugfix. At this point, the forked repository was
+ deleted. However, the upstream project still had not released a
+ new version with a bugfix. Therefore, a Spack-only release was
+ created by specifying the desired hash in the main project
+ repository. The version number ``@1.9.5.1.1`` was chosen for this
+ "release" because it's a descendent of the officially released
+ version ``@1.9.5.1``. This is a trusted download method, and can
+ be released to the Spack community:
+
+ .. code-block:: python
+
+ # This is not a tagged release of pyproj.
+ # The changes in this "version" fix some bugs, especially with Python3 use.
+ version('1.9.5.1.1', 'd035e4bc704d136db79b43ab371b27d2',
+ url='https://www.github.com/jswhit/pyproj/tarball/0be612cc9f972e38b50a90c946a9b353e2ab140f')
+
+ .. note::
+
+ It would have been simpler to use Spack's Git download method,
+ which is also a trusted download in this case:
+
+ .. code-block:: python
+
+ # This is not a tagged release of pyproj.
+ # The changes in this "version" fix some bugs, especially with Python3 use.
+ version('1.9.5.1.1',
+ git='https://github.com/jswhit/pyproj.git',
+ commit='0be612cc9f972e38b50a90c946a9b353e2ab140f')
+
+ .. note::
+
+ In this case, the upstream project fixed the bug in its
+ repository in a relatively timely manner. If that had not been
+ the case, the numbered version in this step could have been
+ released from the forked repository.
+
+
+#. The author of the Spack package has now become an unofficial
+ release engineer for the upstream project. Depending on the
+ situation, it may be advisable to put ``preferred=True`` on the
+ latest *officially released* version.
+
+#. As of August 31, the upstream project still had not made a new
+ release with the bugfix. In the meantime, Spack-built ``py-proj``
+ provides the bugfix needed by packages depending on it. As long as
+ this works, there is no particular need for the upstream project to
+ make a new official release.
+
+#. If the upstream project releases a new official version with the
+ bugfix, then the unofficial ``version()`` line should be removed
+ from the Spack package.
+
+^^^^^^^
+Patches
+^^^^^^^
+
+Spack's source patching mechanism provides another way to fix bugs in
+upstream projects. This has advantages and disadvantages compared to the procedures above.
+
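+For example, a package might carry a small patch file alongside its
+``package.py`` and apply it to selected versions (the file name and
+version range here are hypothetical):
+
+.. code-block:: python
+
+   # Apply a local patch file, but only to the affected versions
+   patch('fix-build.patch', when='@1.2.0:1.2.3')
+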
+Advantages:
+
+ 1. It can fix bugs in existing released versions, and (probably)
+ future releases as well.
+
+ 2. It is lightweight; it does not require a new fork to be set up.
+
+Disadvantages:
+
+ 1. It is harder to develop and debug a patch, vs. a branch in a
+ repository. The user loses the automation provided by version
+ control systems.
+
+ 2. Although patches of a few lines work OK, large patch files can be
+ hard to create and maintain.
+
diff --git a/lib/spack/env/cc b/lib/spack/env/cc
index b9b79f83a3..c4e51834a5 100755
--- a/lib/spack/env/cc
+++ b/lib/spack/env/cc
@@ -55,7 +55,10 @@ parameters=(
# The compiler input variables are checked for sanity later:
# SPACK_CC, SPACK_CXX, SPACK_F77, SPACK_FC
-# Debug flag is optional; set to "TRUE" for debug logging:
+# The default compiler flags are passed from these variables:
+# SPACK_CFLAGS, SPACK_CXXFLAGS, SPACK_FCFLAGS, SPACK_FFLAGS,
+# SPACK_LDFLAGS, SPACK_LDLIBS
+# Debug env var is optional; set to true for debug logging:
# SPACK_DEBUG
# Test command is used to unit test the compiler script.
# SPACK_TEST_COMMAND
@@ -99,21 +102,25 @@ case "$command" in
command="$SPACK_CC"
language="C"
comp="CC"
+ lang_flags=C
;;
c++|CC|g++|clang++|icpc|pgc++|xlc++)
command="$SPACK_CXX"
language="C++"
comp="CXX"
+ lang_flags=CXX
;;
- f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
+ ftn|f90|fc|f95|gfortran|ifort|pgfortran|xlf90|nagfor)
command="$SPACK_FC"
language="Fortran 90"
comp="FC"
+ lang_flags=F
;;
- f77|gfortran|ifort|pgfortran|xlf|nagfor)
+ f77|gfortran|ifort|pgfortran|xlf|nagfor|ftn)
command="$SPACK_F77"
language="Fortran 77"
comp="F77"
+ lang_flags=F
;;
ld)
mode=ld
@@ -131,7 +138,7 @@ if [[ -z $mode ]]; then
if [[ $arg == -v || $arg == -V || $arg == --version || $arg == -dumpversion ]]; then
mode=vcheck
break
- fi
+ fi
done
fi
@@ -167,6 +174,40 @@ if [[ -z $command ]]; then
die "ERROR: Compiler '$SPACK_COMPILER_SPEC' does not support compiling $language programs."
fi
+#
+# Set paths as defined in the 'environment' section of the compiler config
+# names are stored in SPACK_ENV_TO_SET
+# values are stored in SPACK_ENV_SET_<varname>
+#
+IFS=':' read -ra env_set_varnames <<< "$SPACK_ENV_TO_SET"
+for varname in "${env_set_varnames[@]}"; do
+ spack_varname="SPACK_ENV_SET_$varname"
+ export $varname=${!spack_varname}
+ unset $spack_varname
+done
+
+#
+# Filter '.' and Spack environment directories out of PATH so that
+# this script doesn't just call itself
+#
+IFS=':' read -ra env_path <<< "$PATH"
+IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
+spack_env_dirs+=("" ".")
+PATH=""
+for dir in "${env_path[@]}"; do
+ addpath=true
+ for env_dir in "${spack_env_dirs[@]}"; do
+ if [[ $dir == $env_dir ]]; then
+ addpath=false
+ break
+ fi
+ done
+ if $addpath; then
+ PATH="${PATH:+$PATH:}$dir"
+ fi
+done
+export PATH
+
if [[ $mode == vcheck ]]; then
exec ${command} "$@"
fi
@@ -175,9 +216,9 @@ fi
# It doesn't work with -rpath.
# This variable controls whether they are added.
add_rpaths=true
-if [[ $mode == ld && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
+if [[ ($mode == ld || $mode == ccld) && "$SPACK_SHORT_SPEC" =~ "darwin" ]]; then
for arg in "$@"; do
- if [[ $arg == -r ]]; then
+ if [[ ($arg == -r && $mode == ld) || ($arg == -Wl,-r && $mode == ccld) ]]; then
add_rpaths=false
break
fi
@@ -188,6 +229,42 @@ fi
input_command="$@"
args=("$@")
+# Prepend cppflags, cflags, cxxflags, fcflags, fflags, and ldflags
+
+# Add ldflags
+case "$mode" in
+ ld|ccld)
+ args=(${SPACK_LDFLAGS[@]} "${args[@]}") ;;
+esac
+
+# Add compiler flags.
+case "$mode" in
+ cc|ccld)
+ # Add c, cxx, fc, and f flags
+ case $lang_flags in
+ C)
+ args=(${SPACK_CFLAGS[@]} "${args[@]}") ;;
+ CXX)
+ args=(${SPACK_CXXFLAGS[@]} "${args[@]}") ;;
+ esac
+ ;;
+esac
+
+# Add cppflags
+case "$mode" in
+ cpp|as|cc|ccld)
+ args=(${SPACK_CPPFLAGS[@]} "${args[@]}") ;;
+esac
+
+case "$mode" in cc|ccld)
+ # Add fortran flags
+ case $lang_flags in
+ F)
+ args=(${SPACK_FFLAGS[@]} "${args[@]}") ;;
+ esac
+ ;;
+esac
+
# Read spack dependencies from the path environment variable
IFS=':' read -ra deps <<< "$SPACK_DEPENDENCIES"
for dep in "${deps[@]}"; do
@@ -201,22 +278,38 @@ for dep in "${deps[@]}"; do
# Prepend lib and RPATH directories
if [[ -d $dep/lib ]]; then
if [[ $mode == ccld ]]; then
- $add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
- args=("-L$dep/lib" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("$rpath$dep/lib" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib" "${args[@]}")
+ fi
elif [[ $mode == ld ]]; then
- $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
- args=("-L$dep/lib" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("-rpath" "$dep/lib" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib" "${args[@]}")
+ fi
fi
fi
# Prepend lib64 and RPATH directories
if [[ -d $dep/lib64 ]]; then
if [[ $mode == ccld ]]; then
- $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
- args=("-L$dep/lib64" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("$rpath$dep/lib64" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib64" "${args[@]}")
+ fi
elif [[ $mode == ld ]]; then
- $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
- args=("-L$dep/lib64" "${args[@]}")
+ if [[ $SPACK_RPATH_DEPS == *$dep* ]]; then
+ $add_rpaths && args=("-rpath" "$dep/lib64" "${args[@]}")
+ fi
+ if [[ $SPACK_LINK_DEPS == *$dep* ]]; then
+ args=("-L$dep/lib64" "${args[@]}")
+ fi
fi
fi
done
@@ -230,34 +323,21 @@ elif [[ $mode == ld ]]; then
$add_rpaths && args=("-rpath" "$SPACK_PREFIX/lib" "${args[@]}")
fi
-#
-# Unset pesky environment variables that could affect build sanity.
-#
-unset LD_LIBRARY_PATH
-unset LD_RUN_PATH
-unset DYLD_LIBRARY_PATH
-
-#
-# Filter '.' and Spack environment directories out of PATH so that
-# this script doesn't just call itself
-#
-IFS=':' read -ra env_path <<< "$PATH"
-IFS=':' read -ra spack_env_dirs <<< "$SPACK_ENV_PATH"
-spack_env_dirs+=("" ".")
-PATH=""
-for dir in "${env_path[@]}"; do
- addpath=true
- for env_dir in "${spack_env_dirs[@]}"; do
- if [[ $dir == $env_dir ]]; then
- addpath=false
- break
- fi
- done
- if $addpath; then
- PATH="${PATH:+$PATH:}$dir"
+# Set extra RPATHs
+IFS=':' read -ra extra_rpaths <<< "$SPACK_COMPILER_EXTRA_RPATHS"
+for extra_rpath in "${extra_rpaths[@]}"; do
+ if [[ $mode == ccld ]]; then
+ $add_rpaths && args=("$rpath$extra_rpath" "${args[@]}")
+ elif [[ $mode == ld ]]; then
+ $add_rpaths && args=("-rpath" "$extra_rpath" "${args[@]}")
fi
done
-export PATH
+
+# Add SPACK_LDLIBS to args
+case "$mode" in
+ ld|ccld)
+ args=("${args[@]}" ${SPACK_LDLIBS[@]}) ;;
+esac
full_command=("$command" "${args[@]}")
@@ -275,8 +355,8 @@ fi
if [[ $SPACK_DEBUG == TRUE ]]; then
input_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.in.log"
output_log="$SPACK_DEBUG_LOG_DIR/spack-cc-$SPACK_SHORT_SPEC.out.log"
- echo "[$mode] $command $input_command" >> $input_log
- echo "[$mode] ${full_command[@]}" >> $output_log
+ echo "[$mode] $command $input_command" >> "$input_log"
+ echo "[$mode] ${full_command[@]}" >> "$output_log"
fi
exec "${full_command[@]}"
diff --git a/lib/spack/env/clang/gfortran b/lib/spack/env/clang/gfortran
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/clang/gfortran
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/cray/CC b/lib/spack/env/cray/CC
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/cray/CC
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/cray/cc b/lib/spack/env/cray/cc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/cray/cc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/cray/ftn b/lib/spack/env/cray/ftn
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/cray/ftn
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/craype/CC b/lib/spack/env/craype/CC
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/CC
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/craype/cc b/lib/spack/env/craype/cc
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/cc
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/env/craype/ftn b/lib/spack/env/craype/ftn
new file mode 120000
index 0000000000..82c2b8e90a
--- /dev/null
+++ b/lib/spack/env/craype/ftn
@@ -0,0 +1 @@
+../cc \ No newline at end of file
diff --git a/lib/spack/external/__init__.py b/lib/spack/external/__init__.py
index 7a89a1ac67..48fe4ec5ac 100644
--- a/lib/spack/external/__init__.py
+++ b/lib/spack/external/__init__.py
@@ -1,26 +1,26 @@
##############################################################################
-# Copyright (c) 2013, Lawrence Livermore National Security, LLC.
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
-# Written by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/llnl/spack
# Please also see the LICENSE file for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU General Public License (as published by
-# the Free Software Foundation) version 2.1 dated February 1999.
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU General Public License for more details.
+# conditions of the GNU Lesser General Public License for more details.
#
-# You should have received a copy of the GNU Lesser General Public License
-# along with this program; if not, write to the Free Software Foundation,
-# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
This module contains external, potentially separately licensed,
@@ -29,10 +29,21 @@ packages that are included in spack.
So far:
argparse: We include our own version to be Python 2.6 compatible.
+ distro: Provides a more stable linux distribution detection.
+
+ functools: Used for implementation of total_ordering.
+
+ jsonschema: An implementation of JSON Schema for Python.
+
+ ordereddict: We include our own version to be Python 2.6 compatible.
+
+ py: Needed by pytest. Library with cross-python path,
+ ini-parsing, io, code, and log facilities.
+
pyqver2: External script to query required python version of
python source code. Used for ensuring 2.6 compatibility.
- functools: Used for implementation of total_ordering.
+ pytest: Testing framework used by Spack.
yaml: Used for config files.
"""
diff --git a/lib/spack/external/_pytest/AUTHORS b/lib/spack/external/_pytest/AUTHORS
new file mode 100644
index 0000000000..8c7cb19cee
--- /dev/null
+++ b/lib/spack/external/_pytest/AUTHORS
@@ -0,0 +1,141 @@
+Holger Krekel, holger at merlinux eu
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Abdeali JK
+Abhijeet Kasurde
+Ahn Ki-Wook
+Alexei Kozlenok
+Anatoly Bubenkoff
+Andreas Zeidler
+Andrzej Ostrowski
+Andy Freeland
+Anthon van der Neut
+Antony Lee
+Armin Rigo
+Aron Curzon
+Aviv Palivoda
+Ben Webb
+Benjamin Peterson
+Bernard Pratz
+Bob Ippolito
+Brian Dorsey
+Brian Okken
+Brianna Laugher
+Bruno Oliveira
+Cal Leeming
+Carl Friedrich Bolz
+Charles Cloud
+Charnjit SiNGH (CCSJ)
+Chris Lamb
+Christian Boelsen
+Christian Theunert
+Christian Tismer
+Christopher Gilling
+Daniel Grana
+Daniel Hahler
+Daniel Nuri
+Daniel Wandschneider
+Danielle Jenkins
+Dave Hunt
+David Díaz-Barquero
+David Mohr
+David Vierra
+Diego Russo
+Dmitry Dygalo
+Duncan Betts
+Edison Gustavo Muenz
+Edoardo Batini
+Eduardo Schettino
+Elizaveta Shashkova
+Endre Galaczi
+Eric Hunsberger
+Eric Siegerman
+Erik M. Bray
+Feng Ma
+Florian Bruhin
+Floris Bruynooghe
+Gabriel Reis
+Georgy Dyuldin
+Graham Horler
+Greg Price
+Grig Gheorghiu
+Grigorii Eremeev (budulianin)
+Guido Wesdorp
+Harald Armin Massa
+Ian Bicking
+Jaap Broekhuizen
+Jan Balster
+Janne Vanhala
+Jason R. Coombs
+Javier Domingo Cansino
+Javier Romero
+John Towler
+Jon Sonesen
+Jordan Guymon
+Joshua Bronson
+Jurko Gospodnetić
+Justyna Janczyszyn
+Kale Kundert
+Katarzyna Jachim
+Kevin Cox
+Lee Kamentsky
+Lev Maximov
+Lukas Bednar
+Luke Murphy
+Maciek Fijalkowski
+Maho
+Marc Schlaich
+Marcin Bachry
+Mark Abramowitz
+Markus Unterwaditzer
+Martijn Faassen
+Martin K. Scherer
+Martin Prusse
+Mathieu Clabaut
+Matt Bachmann
+Matt Williams
+Matthias Hafner
+mbyt
+Michael Aquilina
+Michael Birtwell
+Michael Droettboom
+Michael Seifert
+Mike Lundy
+Ned Batchelder
+Neven Mundar
+Nicolas Delaby
+Oleg Pidsadnyi
+Oliver Bestwalter
+Omar Kohl
+Pieter Mulder
+Piotr Banaszkiewicz
+Punyashloka Biswal
+Quentin Pradet
+Ralf Schmitt
+Raphael Pierzina
+Raquel Alegre
+Roberto Polli
+Romain Dorgueil
+Roman Bolshakov
+Ronny Pfannschmidt
+Ross Lawley
+Russel Winder
+Ryan Wooden
+Samuele Pedroni
+Simon Gomizelj
+Stefan Farmbauer
+Stefan Zimmermann
+Stefano Taschini
+Steffen Allner
+Stephan Obermann
+Tareq Alayan
+Ted Xiao
+Thomas Grainger
+Tom Viner
+Trevor Bekolay
+Tyler Goodlet
+Vasily Kuznetsov
+Wouter van Ackooy
+Xuecong Liao
diff --git a/lib/spack/external/_pytest/LICENSE b/lib/spack/external/_pytest/LICENSE
new file mode 100644
index 0000000000..9e27bd7841
--- /dev/null
+++ b/lib/spack/external/_pytest/LICENSE
@@ -0,0 +1,21 @@
+The MIT License (MIT)
+
+Copyright (c) 2004-2016 Holger Krekel and others
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of
+this software and associated documentation files (the "Software"), to deal in
+the Software without restriction, including without limitation the rights to
+use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is furnished to do
+so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/lib/spack/external/_pytest/README.rst b/lib/spack/external/_pytest/README.rst
new file mode 100644
index 0000000000..d5650af655
--- /dev/null
+++ b/lib/spack/external/_pytest/README.rst
@@ -0,0 +1,102 @@
+.. image:: http://docs.pytest.org/en/latest/_static/pytest1.png
+ :target: http://docs.pytest.org
+ :align: center
+ :alt: pytest
+
+------
+
+.. image:: https://img.shields.io/pypi/v/pytest.svg
+ :target: https://pypi.python.org/pypi/pytest
+.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.python.org/pypi/pytest
+.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg
+ :target: https://coveralls.io/r/pytest-dev/pytest
+.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master
+ :target: https://travis-ci.org/pytest-dev/pytest
+.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true
+ :target: https://ci.appveyor.com/project/pytestbot/pytest
+
+The ``pytest`` framework makes it easy to write small tests, yet
+scales to support complex functional testing for applications and libraries.
+
+An example of a simple test:
+
+.. code-block:: python
+
+ # content of test_sample.py
+ def inc(x):
+ return x + 1
+
+ def test_answer():
+ assert inc(3) == 5
+
+
+To execute it::
+
+ $ pytest
+ ============================= test session starts =============================
+ collected 1 items
+
+ test_sample.py F
+
+ ================================== FAILURES ===================================
+ _________________________________ test_answer _________________________________
+
+ def test_answer():
+ > assert inc(3) == 5
+ E assert 4 == 5
+ E + where 4 = inc(3)
+
+ test_sample.py:5: AssertionError
+ ========================== 1 failed in 0.04 seconds ===========================
+
+
+Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://docs.pytest.org/en/latest/getting-started.html#our-first-test-run>`_ for more examples.
+
+
+Features
+--------
+
+- Detailed info on failing `assert statements <http://docs.pytest.org/en/latest/assert.html>`_ (no need to remember ``self.assert*`` names);
+
+- `Auto-discovery
+ <http://docs.pytest.org/en/latest/goodpractices.html#python-test-discovery>`_
+ of test modules and functions;
+
+- `Modular fixtures <http://docs.pytest.org/en/latest/fixture.html>`_ for
+ managing small or parametrized long-lived test resources;
+
+- Can run `unittest <http://docs.pytest.org/en/latest/unittest.html>`_ (or trial),
+ `nose <http://docs.pytest.org/en/latest/nose.html>`_ test suites out of the box;
+
+- Python2.6+, Python3.3+, PyPy-2.3, Jython-2.5 (untested);
+
+- Rich plugin architecture, with over 150+ `external plugins <http://docs.pytest.org/en/latest/plugins.html#installing-external-plugins-searching>`_ and thriving community;
+
+
+Documentation
+-------------
+
+For full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org.
+
+
+Bugs/Requests
+-------------
+
+Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features.
+
+
+Changelog
+---------
+
+Consult the `Changelog <http://docs.pytest.org/en/latest/changelog.html>`__ page for fixes and enhancements of each version.
+
+
+License
+-------
+
+Copyright Holger Krekel and others, 2004-2016.
+
+Distributed under the terms of the `MIT`_ license, pytest is free and open source software.
+
+.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE
diff --git a/lib/spack/external/_pytest/__init__.py b/lib/spack/external/_pytest/__init__.py
new file mode 100644
index 0000000000..be20d3d41c
--- /dev/null
+++ b/lib/spack/external/_pytest/__init__.py
@@ -0,0 +1,2 @@
+#
+__version__ = '3.0.5'
diff --git a/lib/spack/external/_pytest/_argcomplete.py b/lib/spack/external/_pytest/_argcomplete.py
new file mode 100644
index 0000000000..3ab679d8be
--- /dev/null
+++ b/lib/spack/external/_pytest/_argcomplete.py
@@ -0,0 +1,102 @@
+
+"""allow bash-completion for argparse with argcomplete if installed
+needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail
+to find the magic string, so _ARGCOMPLETE env. var is never set, and
+this does not need special code.
+
+argcomplete does not support python 2.5 (although the changes for that
+are minor).
+
+Function try_argcomplete(parser) should be called directly before
+the call to ArgumentParser.parse_args().
+
+The filescompleter is what you normally would use on the positional
+arguments specification, in order to get "dirname/" after "dirn<TAB>"
+instead of the default "dirname ":
+
+ optparser.add_argument(Config._file_or_dir, nargs='*'
+ ).completer=filescompleter
+
+Other, application specific, completers should go in the file
+doing the add_argument calls as they need to be specified as .completer
+attributes as well. (If argcomplete is not installed, the function the
+attribute points to will not be used).
+
+SPEEDUP
+=======
+The generic argcomplete script for bash-completion
+(/etc/bash_completion.d/python-argcomplete.sh )
+uses a python program to determine startup script generated by pip.
+You can speed up completion somewhat by changing this script to include
+ # PYTHON_ARGCOMPLETE_OK
+so the the python-argcomplete-check-easy-install-script does not
+need to be called to find the entry point of the code and see if that is
+marked with PYTHON_ARGCOMPLETE_OK
+
+INSTALL/DEBUGGING
+=================
+To include this support in another application that has setup.py generated
+scripts:
+- add the line:
+ # PYTHON_ARGCOMPLETE_OK
+ near the top of the main python entry point
+- include in the file calling parse_args():
+ from _argcomplete import try_argcomplete, filescompleter
+ , call try_argcomplete just before parse_args(), and optionally add
+ filescompleter to the positional arguments' add_argument()
+If things do not work right away:
+- switch on argcomplete debugging with (also helpful when doing custom
+ completers):
+ export _ARC_DEBUG=1
+- run:
+ python-argcomplete-check-easy-install-script $(which appname)
+ echo $?
+ will echo 0 if the magic line has been found, 1 if not
+- sometimes it helps to find early on errors using:
+ _ARGCOMPLETE=1 _ARC_DEBUG=1 appname
+ which should throw a KeyError: 'COMPLINE' (which is properly set by the
+ global argcomplete script).
+"""
+
+import sys
+import os
+from glob import glob
+
+class FastFilesCompleter:
+ 'Fast file completer class'
+ def __init__(self, directories=True):
+ self.directories = directories
+
+ def __call__(self, prefix, **kwargs):
+ """only called on non option completions"""
+ if os.path.sep in prefix[1:]: #
+ prefix_dir = len(os.path.dirname(prefix) + os.path.sep)
+ else:
+ prefix_dir = 0
+ completion = []
+ globbed = []
+ if '*' not in prefix and '?' not in prefix:
+ if prefix[-1] == os.path.sep: # we are on unix, otherwise no bash
+ globbed.extend(glob(prefix + '.*'))
+ prefix += '*'
+ globbed.extend(glob(prefix))
+ for x in sorted(globbed):
+ if os.path.isdir(x):
+ x += '/'
+ # append stripping the prefix (like bash, not like compgen)
+ completion.append(x[prefix_dir:])
+ return completion
+
+
+if os.environ.get('_ARGCOMPLETE'):
+ try:
+ import argcomplete.completers
+ except ImportError:
+ sys.exit(-1)
+ filescompleter = FastFilesCompleter()
+
+ def try_argcomplete(parser):
+ argcomplete.autocomplete(parser)
+else:
+ def try_argcomplete(parser): pass
+ filescompleter = None
diff --git a/lib/spack/external/_pytest/_code/__init__.py b/lib/spack/external/_pytest/_code/__init__.py
new file mode 100644
index 0000000000..3463c11eac
--- /dev/null
+++ b/lib/spack/external/_pytest/_code/__init__.py
@@ -0,0 +1,9 @@
+""" python inspection/code generation API """
+from .code import Code # noqa
+from .code import ExceptionInfo # noqa
+from .code import Frame # noqa
+from .code import Traceback # noqa
+from .code import getrawcode # noqa
+from .source import Source # noqa
+from .source import compile_ as compile # noqa
+from .source import getfslineno # noqa
diff --git a/lib/spack/external/_pytest/_code/_py2traceback.py b/lib/spack/external/_pytest/_code/_py2traceback.py
new file mode 100644
index 0000000000..a830d9899a
--- /dev/null
+++ b/lib/spack/external/_pytest/_code/_py2traceback.py
@@ -0,0 +1,81 @@
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+import types
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+ # Clear these out first because issubtype(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (isinstance(etype, BaseException) or
+ isinstance(etype, types.InstanceType) or
+ etype is None or type(etype) is str):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ if isinstance(badline, bytes): # python 2 only
+ badline = badline.decode('utf-8', 'replace')
+ lines.append(u' %s\n' % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip('\n')[:offset].lstrip()
+ # non-space whitespace (likes tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(' %s^\n' % ''.join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
+def _format_final_exc_line(etype, value):
+ """Return a list of a single line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+def _some_str(value):
+ try:
+ return unicode(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return '<unprintable %s object>' % type(value).__name__
diff --git a/lib/spack/external/_pytest/_code/code.py b/lib/spack/external/_pytest/_code/code.py
new file mode 100644
index 0000000000..616d5c4313
--- /dev/null
+++ b/lib/spack/external/_pytest/_code/code.py
@@ -0,0 +1,861 @@
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS
+import re
+from weakref import ref
+
+import py
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+ from traceback import format_exception_only
+else:
+ from ._py2traceback import format_exception_only
+
+
+class Code(object):
+ """ wrapper around Python code objects """
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
+ raise TypeError("not a code object: %r" %(rawcode,))
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ __hash__ = None
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ try:
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ raise OSError("py.path check failed.")
+ except OSError:
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a _pytest._code.Source object for the full source file of the code
+ """
+ from _pytest._code import source
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a _pytest._code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ import _pytest._code
+ return _pytest._code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+ # handfull shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ import _pytest._code
+ if self.code.fullsource is None:
+ return _pytest._code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+ 'vars' are optiona; additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ py.builtin.exec_(code, self.f_globals, f_locals )
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry, excinfo=None):
+ self._excinfo = excinfo
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ import _pytest._code
+ return _pytest._code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
+ return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
+
+ @property
+ def statement(self):
+ """ _pytest._code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+ locals = property(getlocals, None, None, "locals of underlaying frame")
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+ # we use the passed in astcache to not reparse asttrees
+ # within exception info printing
+ from _pytest._code.source import getstatementrange_ast
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(self.lineno, source,
+ astnode=astnode)
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ If __tracebackhide__ is a callable, it gets called with the
+ ExceptionInfo instance and can decide whether to hide the traceback.
+
+ mostly for internal use
+ """
+ try:
+ tbh = self.frame.f_locals['__tracebackhide__']
+ except KeyError:
+ try:
+ tbh = self.frame.f_globals['__tracebackhide__']
+ except KeyError:
+ return False
+
+ if py.builtin.callable(tbh):
+ return tbh(None if self._excinfo is None else self._excinfo())
+ else:
+ return tbh
+
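+ # Example of what ishidden() honours (illustrative comment only; the
+ # helper some_check() below is made up):
+ #
+ #   def helper():
+ #       __tracebackhide__ = True      # hide this frame unconditionally
+ #       assert some_check()
+ #
+ #   # or decide dynamically, based on the ExceptionInfo:
+ #   __tracebackhide__ = lambda excinfo: excinfo.errisinstance(ValueError)
+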
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = '???'
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except:
+ line = "???"
+ return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
+
+ def name(self):
+ return self.frame.code.raw.co_name
+ name = property(name, None, None, "co_name of underlying code")
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+ def __init__(self, tb, excinfo=None):
+ """ initialize from given python traceback object and ExceptionInfo """
+ self._excinfo = excinfo
+ if hasattr(tb, 'tb_next'):
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur, excinfo=excinfo)
+ cur = cur.tb_next
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+ by providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if ((path is None or codepath == path) and
+ (excludepath is None or not hasattr(codepath, 'relto') or
+ not codepath.relto(excludepath)) and
+ (lineno is None or x.lineno == lineno) and
+ (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+ return Traceback(x._rawentry, self._excinfo)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackEntry
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+ by default this removes all the TracebackEntries which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self), self._excinfo)
+
+ def getcrashentry(self):
+ """ return last non-hidden traceback entry that lead
+ to the exception of a traceback.
+ """
+ for i in range(-1, -len(self)-1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackEntry where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ #print "checking for recursion at", key
+ l = cache.setdefault(key, [])
+ if l:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in l:
+ if f.is_true(f.eval(co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc)):
+ return i
+ l.append(entry.frame.f_locals)
+ return None
+
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+ '?', 'eval')
+
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ''
+ def __init__(self, tup=None, exprinfo=None):
+ import _pytest._code
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], 'msg', None)
+ if exprinfo is None:
+ exprinfo = py._builtin._totext(tup[1])
+ if exprinfo and exprinfo.startswith('assert '):
+ self._striptext = 'AssertionError: '
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (_pytest._code.Traceback instance)
+ self.traceback = _pytest._code.Traceback(self.tb, excinfo=ref(self))
+
+ def __repr__(self):
+ return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ _pytest._code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = ''.join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno+1, exconly)
+
+ def getrepr(self, showlocals=False, style="long",
+ abspath=False, tbfilter=True, funcargs=False):
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals are ignored.
+ """
+ if style == 'native':
+ return ReprExceptionInfo(ReprTracebackNative(
+ py.std.traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
+
+ fmt = FormattedExcinfo(showlocals=showlocals, style=style,
+ abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+ return fmt.repr_excinfo(self)
+
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return unicode(loc)
+
+ def match(self, regexp):
+ """
+ Match the regular expression 'regexp' on the string representation of
+ the exception. If it matches then True is returned (so that it is
+ possible to write 'assert excinfo.match()'). If it doesn't match,
+ an AssertionError is raised.
+ """
+ __tracebackhide__ = True
+ if not re.search(regexp, str(self.value)):
+ assert 0, "Pattern '{0!s}' not found in '{1!s}'".format(
+ regexp, self.value)
+ return True
+
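+# A short usage sketch for ExceptionInfo and Traceback (illustrative comment
+# only, not executed; boom() is a made-up helper):
+#
+#   def boom():
+#       assert False, "boom"
+#   try:
+#       boom()
+#   except AssertionError:
+#       excinfo = ExceptionInfo()            # wraps sys.exc_info()
+#       excinfo.match("boom")                # regex search on str(value)
+#       tb = excinfo.traceback.filter()      # drop __tracebackhide__ frames
+#       entry = tb.getcrashentry()           # last interesting entry
+#       print(excinfo.getrepr(style="short"))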
+
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
+ def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
+ self.showlocals = showlocals
+ self.style = style
+ self.tbfilter = tbfilter
+ self.funcargs = funcargs
+ self.abspath = abspath
+ self.astcache = {}
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source)-1))
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ import _pytest._code
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = _pytest._code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index+1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split('\n')
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == '__builtins__':
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ #if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" %(name, str_repr))
+ #else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # py.std.pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ import _pytest._code
+ source = self._getentrysource(entry)
+ if source is None:
+ source = _pytest._code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" %(entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+ recursionindex = None
+ if is_recursion_error(excinfo):
+ recursionindex = traceback.recursionindex()
+ last = traceback[-1]
+ entries = []
+ extraline = None
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ if index == recursionindex:
+ extraline = "!!! Recursion detected (same locals & position)"
+ break
+ return ReprTraceback(entries, extraline, style=self.style)
+
+
+ def repr_excinfo(self, excinfo):
+ if sys.version_info[0] < 3:
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+ else:
+ repr_chain = []
+ e = excinfo.value
+ descr = None
+ while e is not None:
+ if excinfo:
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ else:
+ # fallback to native repr if the exception doesn't have a traceback:
+ # ExceptionInfo objects require a full traceback to work
+ reprtraceback = ReprTracebackNative(py.std.traceback.format_exception(type(e), e, None))
+ reprcrash = None
+
+ repr_chain += [(reprtraceback, reprcrash, descr)]
+ if e.__cause__ is not None:
+ e = e.__cause__
+ excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
+ descr = 'The above exception was the direct cause of the following exception:'
+ elif e.__context__ is not None:
+ e = e.__context__
+ excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None
+ descr = 'During handling of the above exception, another exception occurred:'
+ else:
+ e = None
+ repr_chain.reverse()
+ return ExceptionChainRepr(repr_chain)
+
+
+class TerminalRepr(object):
+ def __str__(self):
+ s = self.__unicode__()
+ if sys.version_info[0] < 3:
+ s = s.encode('utf-8')
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ExceptionRepr(TerminalRepr):
+ def __init__(self):
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+
+class ExceptionChainRepr(ExceptionRepr):
+ def __init__(self, chain):
+ super(ExceptionChainRepr, self).__init__()
+ self.chain = chain
+ # reprcrash and reprtraceback of the outermost (the newest) exception
+ # in the chain
+ self.reprtraceback = chain[-1][0]
+ self.reprcrash = chain[-1][1]
+
+ def toterminal(self, tw):
+ for element in self.chain:
+ element[0].toterminal(tw)
+ if element[2] is not None:
+ tw.line("")
+ tw.line(element[2], yellow=True)
+ super(ExceptionChainRepr, self).toterminal(tw)
+
+
+class ReprExceptionInfo(ExceptionRepr):
+ def __init__(self, reprtraceback, reprcrash):
+ super(ReprExceptionInfo, self).__init__()
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ super(ReprExceptionInfo, self).toterminal(tw)
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i+1]
+ if entry.style == "long" or \
+ entry.style == "short" and next_entry.style == "long":
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ #tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ #tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines),
+ self.reprlocals,
+ self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+ # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.write(self.path, bold=True, red=True)
+ tw.line(":%s: %s" % (self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" %(name, value)
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, 'im_func', obj)
+ obj = getattr(obj, 'func_code', obj)
+ obj = getattr(obj, 'f_code', obj)
+ obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
+ return obj
+
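+# getrawcode() in a nutshell (illustrative comment only, not executed):
+#
+#   def sample(a, b=2):
+#       return a + b
+#   co = getrawcode(sample)
+#   assert co is sample.__code__ and co.co_name == "sample"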
+
+if sys.version_info[:2] >= (3, 5): # RecursionError introduced in 3.5
+ def is_recursion_error(excinfo):
+ return excinfo.errisinstance(RecursionError) # noqa
+else:
+ def is_recursion_error(excinfo):
+ if not excinfo.errisinstance(RuntimeError):
+ return False
+ try:
+ return "maximum recursion depth exceeded" in str(excinfo.value)
+ except UnicodeError:
+ return False
diff --git a/lib/spack/external/_pytest/_code/source.py b/lib/spack/external/_pytest/_code/source.py
new file mode 100644
index 0000000000..fcec0f5ca7
--- /dev/null
+++ b/lib/spack/external/_pytest/_code/source.py
@@ -0,0 +1,414 @@
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+cpy_compile = compile
+
+try:
+ import _ast
+ from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+ _AST_FLAG = 0
+ _ast = None
+
+
+class Source(object):
+ """ a immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get('deindent', True)
+ rstrip = kwargs.get('rstrip', True)
+ for part in parts:
+ if not part:
+ partlines = []
+ if isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, py.builtin._basestring):
+ partlines = part.split('\n')
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ __hash__ = None
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ newsource = Source()
+ newsource.lines = self.lines[key.start:key.stop]
+ return newsource
+
+ def __len__(self):
+ return len(self.lines)
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end-1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before='', after='', indent=' ' * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [ (indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=' ' * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent+line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno, assertion=False):
+ """ return Source statement which contains the
+ given line number (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno, assertion)
+ return self[start:end]
+
+ def getstatementrange(self, lineno, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region which contains the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of a multiline construct.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+ # strings etc.pp?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ try:
+ import parser
+ except ImportError:
+ syntax_checker = lambda x: compile(x, 'asd', 'exec')
+ else:
+ syntax_checker = parser.suite
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ #compile(source+'\n', "x", "exec")
+ syntax_checker(source+'\n')
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
+ def compile(self, filename=None, mode='exec',
+ flag=generators.compiler_flag,
+ dont_inherit=0, _genframe=None):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + '%s:%d>' % (fn, lineno)
+ else:
+ filename = base + '%r %s:%d>' % (filename, fn, lineno)
+ source = "\n".join(self.lines) + '\n'
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" "*ex.offset + '^')
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError('\n'.join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
+ py.std.linecache.cache[filename] = (1, None, lines, filename)
+ return co
+
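+# A brief usage sketch for the Source class above (illustrative comment only,
+# not executed):
+#
+#   src = Source("if x:\n    y = 1")
+#   assert len(src) == 2                  # two lines
+#   stmt = src.getstatement(1)            # Source for the statement at line 1
+#   assert str(stmt).strip() == "y = 1"
+#   assert src.isparseable()
+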
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+ generators.compiler_flag, dont_inherit=0):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if _ast is not None and isinstance(source, _ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
+
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
+ import _pytest._code
+ try:
+ code = _pytest._code.Code(obj)
+ except TypeError:
+ try:
+ fn = (py.std.inspect.getsourcefile(obj) or
+ py.std.inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
+
+#
+# helper functions
+#
+
+def findsource(obj):
+ try:
+ sourcelines, lineno = py.std.inspect.findsource(obj)
+ except py.builtin._sysex:
+ raise
+ except:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+
+def getsource(obj, **kwargs):
+ import _pytest._code
+ obj = _pytest._code.getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+ strsrc = "\"Buggy python version consider upgrading, cannot get source\""
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line)-len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+
+ def readline_generator(lines):
+ for line in lines:
+ yield line + '\n'
+ while True:
+ yield ''
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
+
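+# deindent() in a nutshell (illustrative comment only, not executed);
+# continuation lines of multiline tokens keep their indentation:
+#
+#   assert deindent(["    a = 1", "    b = 2"]) == ["a = 1", "b = 2"]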
+
+def get_statement_startend2(lineno, node):
+ import ast
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ l = []
+ for x in ast.walk(node):
+ if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+ l.append(x.lineno - 1)
+ for name in "finalbody", "orelse":
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ l.append(val[0].lineno - 1 - 1)
+ l.sort()
+ insert_index = bisect_right(l, lineno)
+ start = l[insert_index - 1]
+ if insert_index >= len(l):
+ end = None
+ else:
+ end = l[insert_index]
+ return start, end
+
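+# Quick sketch of get_statement_startend2() above (illustrative comment only,
+# not executed):
+#
+#   import ast
+#   node = ast.parse("x = 1\nif x:\n    y = 2\n")
+#   assert get_statement_startend2(0, node) == (0, 1)     # 'x = 1'
+#   assert get_statement_startend2(2, node) == (2, None)  # 'y = 2' is last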
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ if sys.version_info < (2,7):
+ content += "\n"
+ try:
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+ except ValueError:
+ start, end = getstatementrange_old(lineno, source, assertion)
+ return None, start, end
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+ # by using the BlockFinder helper which inspect.getsource() itself uses
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region which contains the given lineno.
+ raise an IndexError if no such statement range can be found.
+ """
+ # XXX this logic is only used on python2.4 and below
+ # 1. find the start of the statement
+ from codeop import compile_command
+ for start in range(lineno, -1, -1):
+ if assertion:
+ line = source.lines[start]
+ # the following lines are not fully tested, change with care
+ if 'super' in line and 'self' in line and '__init__' in line:
+ raise IndexError("likely a subclass")
+ if "assert" not in line and "raise" not in line:
+ continue
+ trylines = source.lines[start:lineno+1]
+ # quick hack to prepare parsing an indented line with
+ # compile_command() (which errors on "return" outside defs)
+ trylines.insert(0, 'def xxx():')
+ trysource = '\n '.join(trylines)
+ # ^ space here
+ try:
+ compile_command(trysource)
+ except (SyntaxError, OverflowError, ValueError):
+ continue
+
+ # 2. find the end of the statement
+ for end in range(lineno+1, len(source)+1):
+ trysource = source[start:end]
+ if trysource.isparseable():
+ return start, end
+ raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
diff --git a/lib/spack/external/_pytest/_pluggy.py b/lib/spack/external/_pytest/_pluggy.py
new file mode 100644
index 0000000000..87d32cf8dd
--- /dev/null
+++ b/lib/spack/external/_pytest/_pluggy.py
@@ -0,0 +1,11 @@
+"""
+imports symbols from vendored "pluggy" if available, otherwise
+falls back to importing "pluggy" from the default namespace.
+"""
+
+try:
+ from _pytest.vendored_packages.pluggy import * # noqa
+ from _pytest.vendored_packages.pluggy import __version__ # noqa
+except ImportError:
+ from pluggy import * # noqa
+ from pluggy import __version__ # noqa
diff --git a/lib/spack/external/_pytest/assertion/__init__.py b/lib/spack/external/_pytest/assertion/__init__.py
new file mode 100644
index 0000000000..3f14a7ae76
--- /dev/null
+++ b/lib/spack/external/_pytest/assertion/__init__.py
@@ -0,0 +1,164 @@
+"""
+support for presenting detailed information in failing assertions.
+"""
+import py
+import os
+import sys
+
+from _pytest.assertion import util
+from _pytest.assertion import rewrite
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption('--assert',
+ action="store",
+ dest="assertmode",
+ choices=("rewrite", "plain",),
+ default="rewrite",
+ metavar="MODE",
+ help="""Control assertion debugging tools. 'plain'
+ performs no assertion debugging. 'rewrite'
+ (the default) rewrites assert statements in
+ test modules on import to provide assert
+ expression information.""")
+
+
+def pytest_namespace():
+ return {'register_assert_rewrite': register_assert_rewrite}
+
+
+def register_assert_rewrite(*names):
+ """Register one or more module names to be rewritten on import.
+
+ This function will make sure that this module or all modules inside
+ the package will get their assert statements rewritten.
+ Thus you should make sure to call this before the module is
+ actually imported, usually in your __init__.py if you are a plugin
+ using a package.
+
+ :raise TypeError: if the given module names are not strings.
+ """
+ for name in names:
+ if not isinstance(name, str):
+ msg = 'expected module names as *args, got {0} instead'
+ raise TypeError(msg.format(repr(names)))
+ for hook in sys.meta_path:
+ if isinstance(hook, rewrite.AssertionRewritingHook):
+ importhook = hook
+ break
+ else:
+ importhook = DummyRewriteHook()
+ importhook.mark_rewrite(*names)
+
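+# Typical call site (illustrative comment only): a plugin distributed as a
+# package calls this from its own __init__.py before importing submodules, so
+# those submodules get rewritten.  'myplugin.helpers' is a made-up name, and
+# the snippet assumes the public alias pytest.register_assert_rewrite:
+#
+#   import pytest
+#   pytest.register_assert_rewrite("myplugin.helpers")
+#   from myplugin import helpers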
+
+class DummyRewriteHook(object):
+ """A no-op import hook for when rewriting is disabled."""
+
+ def mark_rewrite(self, *names):
+ pass
+
+
+class AssertionState:
+ """State for the assertion plugin."""
+
+ def __init__(self, config, mode):
+ self.mode = mode
+ self.trace = config.trace.root.get("assertion")
+ self.hook = None
+
+
+def install_importhook(config):
+ """Try to install the rewrite hook, raise SystemError if it fails."""
+ # Both Jython and CPython 2.6.0 have AST bugs that make the
+ # assertion rewriting hook malfunction.
+ if (sys.platform.startswith('java') or
+ sys.version_info[:3] == (2, 6, 0)):
+ raise SystemError('rewrite not supported')
+
+ config._assertstate = AssertionState(config, 'rewrite')
+ config._assertstate.hook = hook = rewrite.AssertionRewritingHook(config)
+ sys.meta_path.insert(0, hook)
+ config._assertstate.trace('installed rewrite import hook')
+
+ def undo():
+ hook = config._assertstate.hook
+ if hook is not None and hook in sys.meta_path:
+ sys.meta_path.remove(hook)
+
+ config.add_cleanup(undo)
+ return hook
+
+
+def pytest_collection(session):
+ # this hook is only called when test modules are collected
+ # so for example not in the master process of pytest-xdist
+ # (which does not collect test modules)
+ assertstate = getattr(session.config, '_assertstate', None)
+ if assertstate:
+ if assertstate.hook is not None:
+ assertstate.hook.set_session(session)
+
+
+def _running_on_ci():
+ """Check if we're currently running on a CI system."""
+ env_vars = ['CI', 'BUILD_NUMBER']
+ return any(var in os.environ for var in env_vars)
+
+
+def pytest_runtest_setup(item):
+ """Setup the pytest_assertrepr_compare hook
+
+ The newinterpret and rewrite modules will use util._reprcompare if
+ it exists to use custom reporting via the
+ pytest_assertrepr_compare hook. This sets up this custom
+ comparison for the test.
+ """
+ def callbinrepr(op, left, right):
+ """Call the pytest_assertrepr_compare hook and prepare the result
+
+ This uses the first result from the hook and then ensures the
+ following:
+ * Overly verbose explanations are dropped unless -vv was used or
+ running on a CI.
+ * Embedded newlines are escaped to help util.format_explanation()
+ later.
+ * If the rewrite mode is used embedded %-characters are replaced
+ to protect later % formatting.
+
+ The result can be formatted by util.format_explanation() for
+ pretty printing.
+ """
+ hook_result = item.ihook.pytest_assertrepr_compare(
+ config=item.config, op=op, left=left, right=right)
+ for new_expl in hook_result:
+ if new_expl:
+ if (sum(len(p) for p in new_expl[1:]) > 80*8 and
+ item.config.option.verbose < 2 and
+ not _running_on_ci()):
+ show_max = 10
+ truncated_lines = len(new_expl) - show_max
+ new_expl[show_max:] = [py.builtin._totext(
+ 'Detailed information truncated (%d more lines)'
+ ', use "-vv" to show' % truncated_lines)]
+ new_expl = [line.replace("\n", "\\n") for line in new_expl]
+ res = py.builtin._totext("\n~").join(new_expl)
+ if item.config.getvalue("assertmode") == "rewrite":
+ res = res.replace("%", "%%")
+ return res
+ util._reprcompare = callbinrepr
+
+
+def pytest_runtest_teardown(item):
+ util._reprcompare = None
+
+
+def pytest_sessionfinish(session):
+ assertstate = getattr(session.config, '_assertstate', None)
+ if assertstate:
+ if assertstate.hook is not None:
+ assertstate.hook.set_session(None)
+
+
+# Expose this plugin's implementation for the pytest_assertrepr_compare hook
+pytest_assertrepr_compare = util.assertrepr_compare
diff --git a/lib/spack/external/_pytest/assertion/rewrite.py b/lib/spack/external/_pytest/assertion/rewrite.py
new file mode 100644
index 0000000000..abf5b491fe
--- /dev/null
+++ b/lib/spack/external/_pytest/assertion/rewrite.py
@@ -0,0 +1,945 @@
+"""Rewrite assertion AST to produce nice error messages"""
+
+import ast
+import _ast
+import errno
+import itertools
+import imp
+import marshal
+import os
+import re
+import struct
+import sys
+import types
+from fnmatch import fnmatch
+
+import py
+from _pytest.assertion import util
+
+
+# pytest caches rewritten pycs in __pycache__.
+if hasattr(imp, "get_tag"):
+ PYTEST_TAG = imp.get_tag() + "-PYTEST"
+else:
+ if hasattr(sys, "pypy_version_info"):
+ impl = "pypy"
+ elif sys.platform == "java":
+ impl = "jython"
+ else:
+ impl = "cpython"
+ ver = sys.version_info
+ PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1])
+ del ver, impl
+
+PYC_EXT = ".py" + (__debug__ and "c" or "o")
+PYC_TAIL = "." + PYTEST_TAG + PYC_EXT
+
+REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2)
+ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3
+
+if sys.version_info >= (3,5):
+ ast_Call = ast.Call
+else:
+ ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None)
+
+
+class AssertionRewritingHook(object):
+ """PEP302 Import hook which rewrites asserts."""
+
+ def __init__(self, config):
+ self.config = config
+ self.fnpats = config.getini("python_files")
+ self.session = None
+ self.modules = {}
+ self._rewritten_names = set()
+ self._register_with_pkg_resources()
+ self._must_rewrite = set()
+
+ def set_session(self, session):
+ self.session = session
+
+ def find_module(self, name, path=None):
+ state = self.config._assertstate
+ state.trace("find_module called for: %s" % name)
+ names = name.rsplit(".", 1)
+ lastname = names[-1]
+ pth = None
+ if path is not None:
+ # Starting with Python 3.3, path is a _NamespacePath(), which
+ # causes problems if not converted to list.
+ path = list(path)
+ if len(path) == 1:
+ pth = path[0]
+ if pth is None:
+ try:
+ fd, fn, desc = imp.find_module(lastname, path)
+ except ImportError:
+ return None
+ if fd is not None:
+ fd.close()
+ tp = desc[2]
+ if tp == imp.PY_COMPILED:
+ if hasattr(imp, "source_from_cache"):
+ try:
+ fn = imp.source_from_cache(fn)
+ except ValueError:
+ # Python 3 doesn't like orphaned but still-importable
+ # .pyc files.
+ fn = fn[:-1]
+ else:
+ fn = fn[:-1]
+ elif tp != imp.PY_SOURCE:
+ # Don't know what this is.
+ return None
+ else:
+ fn = os.path.join(pth, name.rpartition(".")[2] + ".py")
+
+ fn_pypath = py.path.local(fn)
+ if not self._should_rewrite(name, fn_pypath, state):
+ return None
+
+ self._rewritten_names.add(name)
+
+ # The requested module looks like a test file, so rewrite it. This is
+ # the most magical part of the process: load the source, rewrite the
+ # asserts, and load the rewritten source. We also cache the rewritten
+ # module code in a special pyc. We must be aware of the possibility of
+ # concurrent pytest processes rewriting and loading pycs. To avoid
+ # tricky race conditions, we maintain the following invariant: The
+ # cached pyc is always a complete, valid pyc. Operations on it must be
+ # atomic. POSIX's atomic rename comes in handy.
+ write = not sys.dont_write_bytecode
+ cache_dir = os.path.join(fn_pypath.dirname, "__pycache__")
+ if write:
+ try:
+ os.mkdir(cache_dir)
+ except OSError:
+ e = sys.exc_info()[1].errno
+ if e == errno.EEXIST:
+ # Either the __pycache__ directory already exists (the
+ # common case) or it's blocked by a non-dir node. In the
+ # latter case, we'll ignore it in _write_pyc.
+ pass
+ elif e in [errno.ENOENT, errno.ENOTDIR]:
+ # One of the path components was not a directory, likely
+ # because we're in a zip file.
+ write = False
+ elif e in [errno.EACCES, errno.EROFS, errno.EPERM]:
+ state.trace("read only directory: %r" % fn_pypath.dirname)
+ write = False
+ else:
+ raise
+ cache_name = fn_pypath.basename[:-3] + PYC_TAIL
+ pyc = os.path.join(cache_dir, cache_name)
+ # Notice that even if we're in a read-only directory, I'm going
+ # to check for a cached pyc. This may not be optimal...
+ co = _read_pyc(fn_pypath, pyc, state.trace)
+ if co is None:
+ state.trace("rewriting %r" % (fn,))
+ source_stat, co = _rewrite_test(self.config, fn_pypath)
+ if co is None:
+ # Probably a SyntaxError in the test.
+ return None
+ if write:
+ _make_rewritten_pyc(state, source_stat, pyc, co)
+ else:
+ state.trace("found cached rewritten pyc for %r" % (fn,))
+ self.modules[name] = co, pyc
+ return self
+
+ def _should_rewrite(self, name, fn_pypath, state):
+ # always rewrite conftest files
+ fn = str(fn_pypath)
+ if fn_pypath.basename == 'conftest.py':
+ state.trace("rewriting conftest file: %r" % (fn,))
+ return True
+
+ if self.session is not None:
+ if self.session.isinitpath(fn):
+ state.trace("matched test file (was specified on cmdline): %r" %
+ (fn,))
+ return True
+
+ # modules not passed explicitly on the command line are only
+ # rewritten if they match the naming convention for test files
+ for pat in self.fnpats:
+ # use fnmatch instead of fn_pypath.fnmatch because the
+ # latter might trigger an import to fnmatch.fnmatch
+ # internally, which would cause this method to be
+ # called recursively
+ if fnmatch(fn_pypath.basename, pat):
+ state.trace("matched test file %r" % (fn,))
+ return True
+
+ for marked in self._must_rewrite:
+ if name.startswith(marked):
+ state.trace("matched marked file %r (from %r)" % (name, marked))
+ return True
+
+ return False
+
+ def mark_rewrite(self, *names):
+ """Mark import names as needing to be re-written.
+
+ The named module or package as well as any nested modules will
+ be re-written on import.
+ """
+ already_imported = set(names).intersection(set(sys.modules))
+ if already_imported:
+ for name in already_imported:
+ if name not in self._rewritten_names:
+ self._warn_already_imported(name)
+ self._must_rewrite.update(names)
+
+ def _warn_already_imported(self, name):
+ self.config.warn(
+ 'P1',
+ 'Module already imported so can not be re-written: %s' % name)
+
+ def load_module(self, name):
+ # If there is an existing module object named 'fullname' in
+ # sys.modules, the loader must use that existing module. (Otherwise,
+ # the reload() builtin will not work correctly.)
+ if name in sys.modules:
+ return sys.modules[name]
+
+ co, pyc = self.modules.pop(name)
+ # I wish I could just call imp.load_compiled here, but __file__ has to
+ # be set properly. In Python 3.2+, this all would be handled correctly
+ # by load_compiled.
+ mod = sys.modules[name] = imp.new_module(name)
+ try:
+ mod.__file__ = co.co_filename
+ # Normally, this attribute is 3.2+.
+ mod.__cached__ = pyc
+ mod.__loader__ = self
+ py.builtin.exec_(co, mod.__dict__)
+ except:
+ del sys.modules[name]
+ raise
+ return sys.modules[name]
+
+
+
+ def is_package(self, name):
+ try:
+ fd, fn, desc = imp.find_module(name)
+ except ImportError:
+ return False
+ if fd is not None:
+ fd.close()
+ tp = desc[2]
+ return tp == imp.PKG_DIRECTORY
+
+ @classmethod
+ def _register_with_pkg_resources(cls):
+ """
+ Ensure package resources can be loaded from this loader. May be called
+ multiple times, as the operation is idempotent.
+ """
+ try:
+ import pkg_resources
+ # access an attribute in case a deferred importer is present
+ pkg_resources.__name__
+ except ImportError:
+ return
+
+ # Since pytest tests are always located in the file system, the
+ # DefaultProvider is appropriate.
+ pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider)
+
+ def get_data(self, pathname):
+ """Optional PEP302 get_data API.
+ """
+ with open(pathname, 'rb') as f:
+ return f.read()
+
+
+def _write_pyc(state, co, source_stat, pyc):
+ # Technically, we don't have to have the same pyc format as
+ # (C)Python, since these "pycs" should never be seen by builtin
+ # import. However, there's little reason to deviate, and I hope
+ # sometime to be able to use imp.load_compiled to load them. (See
+ # the comment in load_module above.)
+ try:
+ fp = open(pyc, "wb")
+ except IOError:
+ err = sys.exc_info()[1].errno
+ state.trace("error writing pyc file at %s: errno=%s" %(pyc, err))
+ # we ignore any failure to write the cache file
+ # there are many reasons, permission-denied, __pycache__ being a
+ # file etc.
+ return False
+ try:
+ fp.write(imp.get_magic())
+ mtime = int(source_stat.mtime)
+ size = source_stat.size & 0xFFFFFFFF
+ fp.write(struct.pack("<ll", mtime, size))
+ marshal.dump(co, fp)
+ finally:
+ fp.close()
+ return True
+
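+# Layout of the cached "pyc" written above, mirrored by _read_pyc() below
+# (illustrative comment only, not executed; 'pyc' is a hypothetical path):
+#
+#   with open(pyc, "rb") as fp:
+#       header = fp.read(12)
+#       assert header[:4] == imp.get_magic()
+#       mtime, size = struct.unpack("<ll", header[4:])
+#       co = marshal.load(fp)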
+
+RN = "\r\n".encode("utf-8")
+N = "\n".encode("utf-8")
+
+cookie_re = re.compile(r"^[ \t\f]*#.*coding[:=][ \t]*[-\w.]+")
+BOM_UTF8 = '\xef\xbb\xbf'
+
+def _rewrite_test(config, fn):
+ """Try to read and rewrite *fn* and return the code object."""
+ state = config._assertstate
+ try:
+ stat = fn.stat()
+ source = fn.read("rb")
+ except EnvironmentError:
+ return None, None
+ if ASCII_IS_DEFAULT_ENCODING:
+ # ASCII is the default encoding in Python 2. Without a coding
+ # declaration, Python 2 will complain about any bytes in the file
+ # outside the ASCII range. Sadly, this behavior does not extend to
+ # compile() or ast.parse(), which prefer to interpret the bytes as
+ # latin-1. (At least they properly handle explicit coding cookies.) To
+ # preserve this error behavior, we could force ast.parse() to use ASCII
+ # as the encoding by inserting a coding cookie. Unfortunately, that
+ # messes up line numbers. Thus, we have to check ourselves if anything
+ # is outside the ASCII range in the case no encoding is explicitly
+ # declared. For more context, see issue #269. Yay for Python 3 which
+ # gets this right.
+ end1 = source.find("\n")
+ end2 = source.find("\n", end1 + 1)
+ if (not source.startswith(BOM_UTF8) and
+ cookie_re.match(source[0:end1]) is None and
+ cookie_re.match(source[end1 + 1:end2]) is None):
+ if hasattr(state, "_indecode"):
+ # encodings imported us again, so don't rewrite.
+ return None, None
+ state._indecode = True
+ try:
+ try:
+ source.decode("ascii")
+ except UnicodeDecodeError:
+ # Let it fail in real import.
+ return None, None
+ finally:
+ del state._indecode
+ # On Python versions which are not 2.7 and less than or equal to 3.1, the
+ # parser expects *nix newlines.
+ if REWRITE_NEWLINES:
+ source = source.replace(RN, N) + N
+ try:
+ tree = ast.parse(source)
+ except SyntaxError:
+ # Let this pop up again in the real import.
+ state.trace("failed to parse: %r" % (fn,))
+ return None, None
+ rewrite_asserts(tree, fn, config)
+ try:
+ co = compile(tree, fn.strpath, "exec")
+ except SyntaxError:
+ # It's possible that this error is from some bug in the
+ # assertion rewriting, but I don't know of a fast way to tell.
+ state.trace("failed to compile: %r" % (fn,))
+ return None, None
+ return stat, co
+
+def _make_rewritten_pyc(state, source_stat, pyc, co):
+ """Try to dump rewritten code to *pyc*."""
+ if sys.platform.startswith("win"):
+ # Windows grants exclusive access to open files and doesn't have atomic
+ # rename, so just write into the final file.
+ _write_pyc(state, co, source_stat, pyc)
+ else:
+ # When not on windows, assume rename is atomic. Dump the code object
+ # into a file specific to this process and atomically replace it.
+ proc_pyc = pyc + "." + str(os.getpid())
+ if _write_pyc(state, co, source_stat, proc_pyc):
+ os.rename(proc_pyc, pyc)
+
+def _read_pyc(source, pyc, trace=lambda x: None):
+ """Possibly read a pytest pyc containing rewritten code.
+
+ Return rewritten code if successful or None if not.
+ """
+ try:
+ fp = open(pyc, "rb")
+ except IOError:
+ return None
+ with fp:
+ try:
+ mtime = int(source.mtime())
+ size = source.size()
+ data = fp.read(12)
+ except EnvironmentError as e:
+ trace('_read_pyc(%s): EnvironmentError %s' % (source, e))
+ return None
+ # Check for invalid or out of date pyc file.
+ if (len(data) != 12 or data[:4] != imp.get_magic() or
+ struct.unpack("<ll", data[4:]) != (mtime, size)):
+ trace('_read_pyc(%s): invalid or out of date pyc' % source)
+ return None
+ try:
+ co = marshal.load(fp)
+ except Exception as e:
+ trace('_read_pyc(%s): marshal.load error %s' % (source, e))
+ return None
+ if not isinstance(co, types.CodeType):
+ trace('_read_pyc(%s): not a code object' % source)
+ return None
+ return co
+
+
+def rewrite_asserts(mod, module_path=None, config=None):
+ """Rewrite the assert statements in mod."""
+ AssertionRewriter(module_path, config).run(mod)
+
+
+def _saferepr(obj):
+ """Get a safe repr of an object for assertion error messages.
+
+ The assertion formatting (util.format_explanation()) requires
+ newlines to be escaped since they are a special character for it.
+ Normally assertion.util.format_explanation() does this, but a custom
+ repr may itself contain one of the special escape sequences; in
+ particular '\n{' and '\n}' are likely to be present in JSON reprs.
+
+ """
+ repr = py.io.saferepr(obj)
+ if py.builtin._istext(repr):
+ t = py.builtin.text
+ else:
+ t = py.builtin.bytes
+ return repr.replace(t("\n"), t("\\n"))
+
+
+from _pytest.assertion.util import format_explanation as _format_explanation # noqa
+
+def _format_assertmsg(obj):
+ """Format the custom assertion message given.
+
+ For strings this simply replaces newlines with '\n~' so that
+ util.format_explanation() will preserve them instead of escaping
+ newlines. For other objects py.io.saferepr() is used first.
+
+ """
+ # reprlib appears to have a bug which means that if a string
+ # contains a newline it gets escaped, however if an object has a
+ # .__repr__() which contains newlines it does not get escaped.
+ # In either case we want to preserve the newline.
+ if py.builtin._istext(obj) or py.builtin._isbytes(obj):
+ s = obj
+ is_repr = False
+ else:
+ s = py.io.saferepr(obj)
+ is_repr = True
+ if py.builtin._istext(s):
+ t = py.builtin.text
+ else:
+ t = py.builtin.bytes
+ s = s.replace(t("\n"), t("\n~")).replace(t("%"), t("%%"))
+ if is_repr:
+ s = s.replace(t("\\n"), t("\n~"))
+ return s
+
+def _should_repr_global_name(obj):
+ return not hasattr(obj, "__name__") and not py.builtin.callable(obj)
+
+def _format_boolop(explanations, is_or):
+ explanation = "(" + (is_or and " or " or " and ").join(explanations) + ")"
+ if py.builtin._istext(explanation):
+ t = py.builtin.text
+ else:
+ t = py.builtin.bytes
+ return explanation.replace(t('%'), t('%%'))
+
+def _call_reprcompare(ops, results, expls, each_obj):
+ for i, res, expl in zip(range(len(ops)), results, expls):
+ try:
+ done = not res
+ except Exception:
+ done = True
+ if done:
+ break
+ if util._reprcompare is not None:
+ custom = util._reprcompare(ops[i], each_obj[i], each_obj[i + 1])
+ if custom is not None:
+ return custom
+ return expl
+
+
+unary_map = {
+ ast.Not: "not %s",
+ ast.Invert: "~%s",
+ ast.USub: "-%s",
+ ast.UAdd: "+%s"
+}
+
+binop_map = {
+ ast.BitOr: "|",
+ ast.BitXor: "^",
+ ast.BitAnd: "&",
+ ast.LShift: "<<",
+ ast.RShift: ">>",
+ ast.Add: "+",
+ ast.Sub: "-",
+ ast.Mult: "*",
+ ast.Div: "/",
+ ast.FloorDiv: "//",
+ ast.Mod: "%%", # escaped for string formatting
+ ast.Eq: "==",
+ ast.NotEq: "!=",
+ ast.Lt: "<",
+ ast.LtE: "<=",
+ ast.Gt: ">",
+ ast.GtE: ">=",
+ ast.Pow: "**",
+ ast.Is: "is",
+ ast.IsNot: "is not",
+ ast.In: "in",
+ ast.NotIn: "not in"
+}
+# Python 3.5+ compatibility
+try:
+ binop_map[ast.MatMult] = "@"
+except AttributeError:
+ pass
+
+# Python 3.4+ compatibility
+if hasattr(ast, "NameConstant"):
+ _NameConstant = ast.NameConstant
+else:
+ def _NameConstant(c):
+ return ast.Name(str(c), ast.Load())
+
+
+def set_location(node, lineno, col_offset):
+ """Set node location information recursively."""
+ def _fix(node, lineno, col_offset):
+ if "lineno" in node._attributes:
+ node.lineno = lineno
+ if "col_offset" in node._attributes:
+ node.col_offset = col_offset
+ for child in ast.iter_child_nodes(node):
+ _fix(child, lineno, col_offset)
+ _fix(node, lineno, col_offset)
+ return node
+
+
+class AssertionRewriter(ast.NodeVisitor):
+ """Assertion rewriting implementation.
+
+ The main entrypoint is to call .run() with an ast.Module instance,
+ this will then find all the assert statements and re-write them to
+ provide intermediate values and a detailed assertion error. See
+ http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html
+ for an overview of how this works.
+
+ The entry point here is .run() which will iterate over all the
+ statements in an ast.Module and for each ast.Assert statement it
+ finds call .visit() with it. Then .visit_Assert() takes over and
+ is responsible for creating new ast statements to replace the
+ original assert statement: it re-writes the test of an assertion
+ to provide intermediate values and replaces it with an if statement
+ which raises an assertion error with a detailed explanation in
+ case the expression is false.
+
+ For this .visit_Assert() uses the visitor pattern to visit all the
+ AST nodes of the ast.Assert.test field, each visit call returning
+ an AST node and the corresponding explanation string. During this
+ process, state is kept in several instance attributes:
+
+ :statements: All the AST statements which will replace the assert
+ statement.
+
+ :variables: This is populated by .variable() with each variable
+ used by the statements so that they can all be set to None at
+ the end of the statements.
+
+ :variable_counter: Counter to create new unique variables needed
+ by statements. Variables are created using .variable() and
+ have the form of "@py_assert0".
+
+ :on_failure: The AST statements which will be executed if the
+ assertion test fails. This is the code which will construct
+ the failure message and raises the AssertionError.
+
+ :explanation_specifiers: A dict filled by .explanation_param()
+ with %-formatting placeholders and their corresponding
+ expressions to use in the building of an assertion message.
+ This is used by .pop_format_context() to build a message.
+
+ :stack: A stack of the explanation_specifiers dicts maintained by
+ .push_format_context() and .pop_format_context() which allows
+ to build another %-formatted string while already building one.
+
+ This state is reset on every new assert statement visited and used
+ by the other visitors.
+
+ """
+
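+ # Sketch of the transformation (illustrative comment only; the real output
+ # uses generated "@py_..." temporaries and the helpers in this module):
+ #
+ #   assert x == y
+ #
+ # becomes, roughly:
+ #
+ #   @py_assert1 = x == y
+ #   if not @py_assert1:
+ #       @py_format3 = ...          # explanation built via _call_reprcompare
+ #       raise AssertionError(@pytest_ar._format_explanation(@py_format3))
+ #   @py_assert1 = None
+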
+ def __init__(self, module_path, config):
+ super(AssertionRewriter, self).__init__()
+ self.module_path = module_path
+ self.config = config
+
+ def run(self, mod):
+ """Find all assert statements in *mod* and rewrite them."""
+ if not mod.body:
+ # Nothing to do.
+ return
+ # Insert some special imports at the top of the module but after any
+ # docstrings and __future__ imports.
+ aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"),
+ ast.alias("_pytest.assertion.rewrite", "@pytest_ar")]
+ expect_docstring = True
+ pos = 0
+ lineno = 0
+ for item in mod.body:
+ if (expect_docstring and isinstance(item, ast.Expr) and
+ isinstance(item.value, ast.Str)):
+ doc = item.value.s
+ if "PYTEST_DONT_REWRITE" in doc:
+ # The module has disabled assertion rewriting.
+ return
+ lineno += len(doc) - 1
+ expect_docstring = False
+ elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or
+ item.module != "__future__"):
+ lineno = item.lineno
+ break
+ pos += 1
+ imports = [ast.Import([alias], lineno=lineno, col_offset=0)
+ for alias in aliases]
+ mod.body[pos:pos] = imports
+ # Collect asserts.
+ nodes = [mod]
+ while nodes:
+ node = nodes.pop()
+ for name, field in ast.iter_fields(node):
+ if isinstance(field, list):
+ new = []
+ for i, child in enumerate(field):
+ if isinstance(child, ast.Assert):
+ # Transform assert.
+ new.extend(self.visit(child))
+ else:
+ new.append(child)
+ if isinstance(child, ast.AST):
+ nodes.append(child)
+ setattr(node, name, new)
+ elif (isinstance(field, ast.AST) and
+ # Don't recurse into expressions as they can't contain
+ # asserts.
+ not isinstance(field, ast.expr)):
+ nodes.append(field)
+
+ def variable(self):
+ """Get a new variable."""
+ # Use a character invalid in python identifiers to avoid clashing.
+ name = "@py_assert" + str(next(self.variable_counter))
+ self.variables.append(name)
+ return name
+
+ def assign(self, expr):
+ """Give *expr* a name."""
+ name = self.variable()
+ self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr))
+ return ast.Name(name, ast.Load())
+
+ def display(self, expr):
+ """Call py.io.saferepr on the expression."""
+ return self.helper("saferepr", expr)
+
+ def helper(self, name, *args):
+ """Call a helper in this module."""
+ py_name = ast.Name("@pytest_ar", ast.Load())
+ attr = ast.Attribute(py_name, "_" + name, ast.Load())
+ return ast_Call(attr, list(args), [])
+
+ def builtin(self, name):
+ """Return the builtin called *name*."""
+ builtin_name = ast.Name("@py_builtins", ast.Load())
+ return ast.Attribute(builtin_name, name, ast.Load())
+
+ def explanation_param(self, expr):
+ """Return a new named %-formatting placeholder for expr.
+
+ This creates a %-formatting placeholder for expr in the
+ current formatting context, e.g. ``%(py0)s``. The placeholder
+        and expr are placed in the current format context so that they
+        can be used on the next call to .pop_format_context().
+
+ """
+ specifier = "py" + str(next(self.variable_counter))
+ self.explanation_specifiers[specifier] = expr
+ return "%(" + specifier + ")s"
+
+ def push_format_context(self):
+ """Create a new formatting context.
+
+        The format context is used when an explanation wants to
+ have a variable value formatted in the assertion message. In
+ this case the value required can be added using
+ .explanation_param(). Finally .pop_format_context() is used
+ to format a string of %-formatted values as added by
+ .explanation_param().
+
+ """
+ self.explanation_specifiers = {}
+ self.stack.append(self.explanation_specifiers)
+
+ def pop_format_context(self, expl_expr):
+ """Format the %-formatted string with current format context.
+
+ The expl_expr should be an ast.Str instance constructed from
+ the %-placeholders created by .explanation_param(). This will
+ add the required code to format said string to .on_failure and
+ return the ast.Name instance of the formatted string.
+
+ """
+ current = self.stack.pop()
+ if self.stack:
+ self.explanation_specifiers = self.stack[-1]
+ keys = [ast.Str(key) for key in current.keys()]
+ format_dict = ast.Dict(keys, list(current.values()))
+ form = ast.BinOp(expl_expr, ast.Mod(), format_dict)
+ name = "@py_format" + str(next(self.variable_counter))
+ self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form))
+ return ast.Name(name, ast.Load())
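+
+    # Editor's note -- illustrative sketch only, not part of pytest: the
+    # visitors below (e.g. .visit_BoolOp() and .visit_Compare()) typically
+    # bracket their work with the two methods above, roughly like
+    #
+    #     self.push_format_context()
+    #     res, expl = self.visit(subexpression)     # fills the specifiers
+    #     expl_name = self.pop_format_context(ast.Str(expl))
+    #
+    # so that expl_name ends up referring to the fully %-formatted
+    # explanation string that is built on failure.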
+
+ def generic_visit(self, node):
+ """Handle expressions we don't have custom code for."""
+ assert isinstance(node, ast.expr)
+ res = self.assign(node)
+ return res, self.explanation_param(self.display(res))
+
+ def visit_Assert(self, assert_):
+ """Return the AST statements to replace the ast.Assert instance.
+
+ This re-writes the test of an assertion to provide
+ intermediate values and replace it with an if statement which
+ raises an assertion error with a detailed explanation in case
+ the expression is false.
+
+ """
+ if isinstance(assert_.test, ast.Tuple) and self.config is not None:
+ fslocation = (self.module_path, assert_.lineno)
+ self.config.warn('R1', 'assertion is always true, perhaps '
+ 'remove parentheses?', fslocation=fslocation)
+ self.statements = []
+ self.variables = []
+ self.variable_counter = itertools.count()
+ self.stack = []
+ self.on_failure = []
+ self.push_format_context()
+ # Rewrite assert into a bunch of statements.
+ top_condition, explanation = self.visit(assert_.test)
+ # Create failure message.
+ body = self.on_failure
+ negation = ast.UnaryOp(ast.Not(), top_condition)
+ self.statements.append(ast.If(negation, body, []))
+ if assert_.msg:
+ assertmsg = self.helper('format_assertmsg', assert_.msg)
+ explanation = "\n>assert " + explanation
+ else:
+ assertmsg = ast.Str("")
+ explanation = "assert " + explanation
+ template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation))
+ msg = self.pop_format_context(template)
+ fmt = self.helper("format_explanation", msg)
+ err_name = ast.Name("AssertionError", ast.Load())
+ exc = ast_Call(err_name, [fmt], [])
+ if sys.version_info[0] >= 3:
+ raise_ = ast.Raise(exc, None)
+ else:
+ raise_ = ast.Raise(exc, None, None)
+ body.append(raise_)
+ # Clear temporary variables by setting them to None.
+ if self.variables:
+ variables = [ast.Name(name, ast.Store())
+ for name in self.variables]
+ clear = ast.Assign(variables, _NameConstant(None))
+ self.statements.append(clear)
+ # Fix line numbers.
+ for stmt in self.statements:
+ set_location(stmt, assert_.lineno, assert_.col_offset)
+ return self.statements
+
+ def visit_Name(self, name):
+ # Display the repr of the name if it's a local variable or
+ # _should_repr_global_name() thinks it's acceptable.
+ locs = ast_Call(self.builtin("locals"), [], [])
+ inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs])
+ dorepr = self.helper("should_repr_global_name", name)
+ test = ast.BoolOp(ast.Or(), [inlocs, dorepr])
+ expr = ast.IfExp(test, self.display(name), ast.Str(name.id))
+ return name, self.explanation_param(expr)
+
+ def visit_BoolOp(self, boolop):
+ res_var = self.variable()
+ expl_list = self.assign(ast.List([], ast.Load()))
+ app = ast.Attribute(expl_list, "append", ast.Load())
+ is_or = int(isinstance(boolop.op, ast.Or))
+ body = save = self.statements
+ fail_save = self.on_failure
+ levels = len(boolop.values) - 1
+ self.push_format_context()
+        # Process each operand, short-circuiting if needed.
+ for i, v in enumerate(boolop.values):
+ if i:
+ fail_inner = []
+ # cond is set in a prior loop iteration below
+ self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa
+ self.on_failure = fail_inner
+ self.push_format_context()
+ res, expl = self.visit(v)
+ body.append(ast.Assign([ast.Name(res_var, ast.Store())], res))
+ expl_format = self.pop_format_context(ast.Str(expl))
+ call = ast_Call(app, [expl_format], [])
+ self.on_failure.append(ast.Expr(call))
+ if i < levels:
+ cond = res
+ if is_or:
+ cond = ast.UnaryOp(ast.Not(), cond)
+ inner = []
+ self.statements.append(ast.If(cond, inner, []))
+ self.statements = body = inner
+ self.statements = save
+ self.on_failure = fail_save
+ expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or))
+ expl = self.pop_format_context(expl_template)
+ return ast.Name(res_var, ast.Load()), self.explanation_param(expl)
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_res, operand_expl = self.visit(unary.operand)
+ res = self.assign(ast.UnaryOp(unary.op, operand_res))
+ return res, pattern % (operand_expl,)
+
+ def visit_BinOp(self, binop):
+ symbol = binop_map[binop.op.__class__]
+ left_expr, left_expl = self.visit(binop.left)
+ right_expr, right_expl = self.visit(binop.right)
+ explanation = "(%s %s %s)" % (left_expl, symbol, right_expl)
+ res = self.assign(ast.BinOp(left_expr, binop.op, right_expr))
+ return res, explanation
+
+ def visit_Call_35(self, call):
+ """
+ visit `ast.Call` nodes on Python3.5 and after
+ """
+ new_func, func_expl = self.visit(call.func)
+ arg_expls = []
+ new_args = []
+ new_kwargs = []
+ for arg in call.args:
+ res, expl = self.visit(arg)
+ arg_expls.append(expl)
+ new_args.append(res)
+ for keyword in call.keywords:
+ res, expl = self.visit(keyword.value)
+ new_kwargs.append(ast.keyword(keyword.arg, res))
+ if keyword.arg:
+ arg_expls.append(keyword.arg + "=" + expl)
+            else:  # **kwargs arguments show up as keyword nodes with .arg set to None
+ arg_expls.append("**" + expl)
+
+ expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
+ new_call = ast.Call(new_func, new_args, new_kwargs)
+ res = self.assign(new_call)
+ res_expl = self.explanation_param(self.display(res))
+ outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
+ return res, outer_expl
+
+ def visit_Starred(self, starred):
+ # From Python 3.5, a Starred node can appear in a function call
+ res, expl = self.visit(starred.value)
+ return starred, '*' + expl
+
+ def visit_Call_legacy(self, call):
+ """
+        visit `ast.Call` nodes on Python 3.4 and below
+ """
+ new_func, func_expl = self.visit(call.func)
+ arg_expls = []
+ new_args = []
+ new_kwargs = []
+ new_star = new_kwarg = None
+ for arg in call.args:
+ res, expl = self.visit(arg)
+ new_args.append(res)
+ arg_expls.append(expl)
+ for keyword in call.keywords:
+ res, expl = self.visit(keyword.value)
+ new_kwargs.append(ast.keyword(keyword.arg, res))
+ arg_expls.append(keyword.arg + "=" + expl)
+ if call.starargs:
+ new_star, expl = self.visit(call.starargs)
+ arg_expls.append("*" + expl)
+ if call.kwargs:
+ new_kwarg, expl = self.visit(call.kwargs)
+ arg_expls.append("**" + expl)
+ expl = "%s(%s)" % (func_expl, ', '.join(arg_expls))
+ new_call = ast.Call(new_func, new_args, new_kwargs,
+ new_star, new_kwarg)
+ res = self.assign(new_call)
+ res_expl = self.explanation_param(self.display(res))
+ outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl)
+ return res, outer_expl
+
+ # ast.Call signature changed on 3.5,
+    # conditionally choose which method is named
+ # visit_Call depending on Python version
+ if sys.version_info >= (3, 5):
+ visit_Call = visit_Call_35
+ else:
+ visit_Call = visit_Call_legacy
+
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ value, value_expl = self.visit(attr.value)
+ res = self.assign(ast.Attribute(value, attr.attr, ast.Load()))
+ res_expl = self.explanation_param(self.display(res))
+ pat = "%s\n{%s = %s.%s\n}"
+ expl = pat % (res_expl, res_expl, value_expl, attr.attr)
+ return res, expl
+
+ def visit_Compare(self, comp):
+ self.push_format_context()
+ left_res, left_expl = self.visit(comp.left)
+ if isinstance(comp.left, (_ast.Compare, _ast.BoolOp)):
+ left_expl = "({0})".format(left_expl)
+ res_variables = [self.variable() for i in range(len(comp.ops))]
+ load_names = [ast.Name(v, ast.Load()) for v in res_variables]
+ store_names = [ast.Name(v, ast.Store()) for v in res_variables]
+ it = zip(range(len(comp.ops)), comp.ops, comp.comparators)
+ expls = []
+ syms = []
+ results = [left_res]
+ for i, op, next_operand in it:
+ next_res, next_expl = self.visit(next_operand)
+ if isinstance(next_operand, (_ast.Compare, _ast.BoolOp)):
+ next_expl = "({0})".format(next_expl)
+ results.append(next_res)
+ sym = binop_map[op.__class__]
+ syms.append(ast.Str(sym))
+ expl = "%s %s %s" % (left_expl, sym, next_expl)
+ expls.append(ast.Str(expl))
+ res_expr = ast.Compare(left_res, [op], [next_res])
+ self.statements.append(ast.Assign([store_names[i]], res_expr))
+ left_res, left_expl = next_res, next_expl
+ # Use pytest.assertion.util._reprcompare if that's available.
+ expl_call = self.helper("call_reprcompare",
+ ast.Tuple(syms, ast.Load()),
+ ast.Tuple(load_names, ast.Load()),
+ ast.Tuple(expls, ast.Load()),
+ ast.Tuple(results, ast.Load()))
+ if len(comp.ops) > 1:
+ res = ast.BoolOp(ast.And(), load_names)
+ else:
+ res = load_names[0]
+ return res, self.explanation_param(self.pop_format_context(expl_call))
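+
+
+# Editor's note: a minimal, hypothetical driver (not part of pytest) showing
+# the rewriter above being exercised by hand; it relies only on this module's
+# own imports (ast) and helpers.
+def _demo_rewrite(source="assert spam == eggs\n"):
+    """Parse *source*, rewrite its asserts and return an ast.dump() string."""
+    mod = ast.parse(source)
+    # module_path/config are only consulted for warnings, so None is fine here.
+    AssertionRewriter("<demo>", None).run(mod)
+    return ast.dump(mod)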
diff --git a/lib/spack/external/_pytest/assertion/util.py b/lib/spack/external/_pytest/assertion/util.py
new file mode 100644
index 0000000000..4a0a4e4310
--- /dev/null
+++ b/lib/spack/external/_pytest/assertion/util.py
@@ -0,0 +1,300 @@
+"""Utilities for assertion debugging"""
+import pprint
+
+import _pytest._code
+import py
+try:
+ from collections import Sequence
+except ImportError:
+ Sequence = list
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+u = py.builtin._totext
+
+# The _reprcompare attribute on the util module is used by the new assertion
+# interpretation code and assertion rewriter to detect this plugin was
+# loaded and in turn call the hooks defined here as part of the
+# DebugInterpreter.
+_reprcompare = None
+
+
+# the re-encoding is needed for python2 repr
+# with non-ascii characters (see issue 877 and 1379)
+def ecu(s):
+ try:
+ return u(s, 'utf-8', 'replace')
+ except TypeError:
+ return s
+
+
+def format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped, however there are
+    three exceptions: \n{, \n} and \n~. The first two are intended to
+    cover nested explanations; see the function and attribute explanations
+ for examples (.visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ explanation = ecu(explanation)
+ lines = _split_explanation(explanation)
+ result = _format_lines(lines)
+ return u('\n').join(result)
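+
+
+# Editor's note: tiny illustration (not part of pytest) of the mini-language
+# documented above.
+def _demo_format_explanation():
+    # '\n{' opens a nested "where ..." clause and '\n}' closes it again, so
+    # this should come out roughly as "assert x\n + where x = f(1)".
+    return format_explanation("assert x\n{x = f(1)\n}")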
+
+
+def _split_explanation(explanation):
+ """Return a list of individual lines in the explanation
+
+ This will return a list of lines split on '\n{', '\n}' and '\n~'.
+ Any other newlines will be escaped and appear in the line as the
+ literal '\n' characters.
+ """
+ raw_lines = (explanation or u('')).split('\n')
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l and l[0] in ['{', '}', '~', '>']:
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+ return lines
+
+
+def _format_lines(lines):
+ """Format the individual lines
+
+ This will replace the '{', '}' and '~' characters of our mini
+ formatting language with the proper 'where ...', 'and ...' and ' +
+ ...' text, taking care of indentation along the way.
+
+ Return a list of formatted lines.
+ """
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = u('and ')
+ else:
+ s = u('where ')
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(u(' +') + u(' ')*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line[0] in ['~', '>']
+ stack[-1] += 1
+ indent = len(stack) if line.startswith('~') else len(stack) - 1
+ result.append(u(' ')*indent + line[1:])
+ assert len(stack) == 1
+ return result
+
+
+# Provide basestring in python3
+try:
+ basestring = basestring
+except NameError:
+ basestring = str
+
+
+def assertrepr_compare(config, op, left, right):
+ """Return specialised explanations for some operators/operands"""
+ width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op
+ left_repr = py.io.saferepr(left, maxsize=int(width//2))
+ right_repr = py.io.saferepr(right, maxsize=width-len(left_repr))
+
+ summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr))
+
+ issequence = lambda x: (isinstance(x, (list, tuple, Sequence)) and
+ not isinstance(x, basestring))
+ istext = lambda x: isinstance(x, basestring)
+ isdict = lambda x: isinstance(x, dict)
+ isset = lambda x: isinstance(x, (set, frozenset))
+
+ def isiterable(obj):
+ try:
+ iter(obj)
+ return not istext(obj)
+ except TypeError:
+ return False
+
+ verbose = config.getoption('verbose')
+ explanation = None
+ try:
+ if op == '==':
+ if istext(left) and istext(right):
+ explanation = _diff_text(left, right, verbose)
+ else:
+ if issequence(left) and issequence(right):
+ explanation = _compare_eq_sequence(left, right, verbose)
+ elif isset(left) and isset(right):
+ explanation = _compare_eq_set(left, right, verbose)
+ elif isdict(left) and isdict(right):
+ explanation = _compare_eq_dict(left, right, verbose)
+ if isiterable(left) and isiterable(right):
+ expl = _compare_eq_iterable(left, right, verbose)
+ if explanation is not None:
+ explanation.extend(expl)
+ else:
+ explanation = expl
+ elif op == 'not in':
+ if istext(left) and istext(right):
+ explanation = _notin_text(left, right, verbose)
+ except Exception:
+ explanation = [
+ u('(pytest_assertion plugin: representation of details failed. '
+ 'Probably an object has a faulty __repr__.)'),
+ u(_pytest._code.ExceptionInfo())]
+
+ if not explanation:
+ return None
+
+ return [summary] + explanation
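+
+
+# Editor's note: illustrative sketch, not part of pytest.  Given a pytest
+# config object it shows the kind of explanation produced for a failing
+# sequence comparison.
+def _demo_assertrepr_compare(config):
+    # For ``[1, 2] == [1, 3]`` this returns roughly:
+    #   ['[1, 2] == [1, 3]',
+    #    'At index 1 diff: 2 != 3',
+    #    'Use -v to get the full diff']
+    return assertrepr_compare(config, '==', [1, 2], [1, 3])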
+
+
+def _diff_text(left, right, verbose=False):
+ """Return the explanation for the diff between text or bytes
+
+ Unless --verbose is used this will skip leading and trailing
+ characters which are identical to keep the diff minimal.
+
+    If the inputs are bytes they will be safely converted to text.
+ """
+ from difflib import ndiff
+ explanation = []
+ if isinstance(left, py.builtin.bytes):
+ left = u(repr(left)[1:-1]).replace(r'\n', '\n')
+ if isinstance(right, py.builtin.bytes):
+ right = u(repr(right)[1:-1]).replace(r'\n', '\n')
+ if not verbose:
+ i = 0 # just in case left or right has zero length
+ for i in range(min(len(left), len(right))):
+ if left[i] != right[i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation = [u('Skipping %s identical leading '
+ 'characters in diff, use -v to show') % i]
+ left = left[i:]
+ right = right[i:]
+ if len(left) == len(right):
+ for i in range(len(left)):
+ if left[-i] != right[-i]:
+ break
+ if i > 42:
+ i -= 10 # Provide some context
+ explanation += [u('Skipping %s identical trailing '
+ 'characters in diff, use -v to show') % i]
+ left = left[:-i]
+ right = right[:-i]
+ keepends = True
+ explanation += [line.strip('\n')
+ for line in ndiff(left.splitlines(keepends),
+ right.splitlines(keepends))]
+ return explanation
+
+
+def _compare_eq_iterable(left, right, verbose=False):
+ if not verbose:
+ return [u('Use -v to get the full diff')]
+ # dynamic import to speedup pytest
+ import difflib
+
+ try:
+ left_formatting = pprint.pformat(left).splitlines()
+ right_formatting = pprint.pformat(right).splitlines()
+ explanation = [u('Full diff:')]
+ except Exception:
+        # hack: PrettyPrinter.pformat() in python 2 fails when formatting items that can't be sorted(), i.e. calling
+ # sorted() on a list would raise. See issue #718.
+ # As a workaround, the full diff is generated by using the repr() string of each item of each container.
+ left_formatting = sorted(repr(x) for x in left)
+ right_formatting = sorted(repr(x) for x in right)
+ explanation = [u('Full diff (fallback to calling repr on each item):')]
+ explanation.extend(line.strip() for line in difflib.ndiff(left_formatting, right_formatting))
+ return explanation
+
+
+def _compare_eq_sequence(left, right, verbose=False):
+ explanation = []
+ for i in range(min(len(left), len(right))):
+ if left[i] != right[i]:
+ explanation += [u('At index %s diff: %r != %r')
+ % (i, left[i], right[i])]
+ break
+ if len(left) > len(right):
+ explanation += [u('Left contains more items, first extra item: %s')
+ % py.io.saferepr(left[len(right)],)]
+ elif len(left) < len(right):
+ explanation += [
+ u('Right contains more items, first extra item: %s') %
+ py.io.saferepr(right[len(left)],)]
+ return explanation
+
+
+def _compare_eq_set(left, right, verbose=False):
+ explanation = []
+ diff_left = left - right
+ diff_right = right - left
+ if diff_left:
+ explanation.append(u('Extra items in the left set:'))
+ for item in diff_left:
+ explanation.append(py.io.saferepr(item))
+ if diff_right:
+ explanation.append(u('Extra items in the right set:'))
+ for item in diff_right:
+ explanation.append(py.io.saferepr(item))
+ return explanation
+
+
+def _compare_eq_dict(left, right, verbose=False):
+ explanation = []
+ common = set(left).intersection(set(right))
+ same = dict((k, left[k]) for k in common if left[k] == right[k])
+ if same and not verbose:
+ explanation += [u('Omitting %s identical items, use -v to show') %
+ len(same)]
+ elif same:
+ explanation += [u('Common items:')]
+ explanation += pprint.pformat(same).splitlines()
+ diff = set(k for k in common if left[k] != right[k])
+ if diff:
+ explanation += [u('Differing items:')]
+ for k in diff:
+ explanation += [py.io.saferepr({k: left[k]}) + ' != ' +
+ py.io.saferepr({k: right[k]})]
+ extra_left = set(left) - set(right)
+ if extra_left:
+ explanation.append(u('Left contains more items:'))
+ explanation.extend(pprint.pformat(
+ dict((k, left[k]) for k in extra_left)).splitlines())
+ extra_right = set(right) - set(left)
+ if extra_right:
+ explanation.append(u('Right contains more items:'))
+ explanation.extend(pprint.pformat(
+ dict((k, right[k]) for k in extra_right)).splitlines())
+ return explanation
+
+
+def _notin_text(term, text, verbose=False):
+ index = text.find(term)
+ head = text[:index]
+ tail = text[index+len(term):]
+ correct_text = head + tail
+ diff = _diff_text(correct_text, text, verbose)
+ newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)]
+ for line in diff:
+ if line.startswith(u('Skipping')):
+ continue
+ if line.startswith(u('- ')):
+ continue
+ if line.startswith(u('+ ')):
+ newdiff.append(u(' ') + line[2:])
+ else:
+ newdiff.append(line)
+ return newdiff
diff --git a/lib/spack/external/_pytest/cacheprovider.py b/lib/spack/external/_pytest/cacheprovider.py
new file mode 100644
index 0000000000..0657001f2d
--- /dev/null
+++ b/lib/spack/external/_pytest/cacheprovider.py
@@ -0,0 +1,245 @@
+"""
+merged implementation of the cache provider
+
+the name "cache" was deliberately not chosen, to ensure pluggy
+automatically ignores the external pytest-cache plugin
+"""
+
+import py
+import pytest
+import json
+from os.path import sep as _sep, altsep as _altsep
+
+
+class Cache(object):
+ def __init__(self, config):
+ self.config = config
+ self._cachedir = config.rootdir.join(".cache")
+ self.trace = config.trace.root.get("cache")
+ if config.getvalue("cacheclear"):
+ self.trace("clearing cachedir")
+ if self._cachedir.check():
+ self._cachedir.remove()
+ self._cachedir.mkdir()
+
+ def makedir(self, name):
+ """ return a directory path object with the given name. If the
+ directory does not yet exist, it will be created. You can use it
+        to manage files, e.g. to store/retrieve database
+        dumps across test sessions.
+
+ :param name: must be a string not containing a ``/`` separator.
+ Make sure the name contains your plugin or application
+ identifiers to prevent clashes with other cache users.
+ """
+ if _sep in name or _altsep is not None and _altsep in name:
+ raise ValueError("name is not allowed to contain path separators")
+ return self._cachedir.ensure_dir("d", name)
+
+ def _getvaluepath(self, key):
+ return self._cachedir.join('v', *key.split('/'))
+
+ def get(self, key, default):
+ """ return cached value for the given key. If no value
+ was yet cached or the value cannot be read, the specified
+ default is returned.
+
+ :param key: must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+ :param default: must be provided in case of a cache-miss or
+ invalid cache values.
+
+ """
+ path = self._getvaluepath(key)
+ if path.check():
+ try:
+ with path.open("r") as f:
+ return json.load(f)
+ except ValueError:
+ self.trace("cache-invalid at %s" % (path,))
+ return default
+
+ def set(self, key, value):
+ """ save value for the given key.
+
+ :param key: must be a ``/`` separated value. Usually the first
+ name is the name of your plugin or your application.
+        :param value: must be any combination of basic
+               python types, including nested types
+               like lists of dictionaries.
+ """
+ path = self._getvaluepath(key)
+ try:
+ path.dirpath().ensure_dir()
+ except (py.error.EEXIST, py.error.EACCES):
+ self.config.warn(
+ code='I9', message='could not create cache path %s' % (path,)
+ )
+ return
+ try:
+ f = path.open('w')
+ except py.error.ENOTDIR:
+ self.config.warn(
+ code='I9', message='cache could not write path %s' % (path,))
+ else:
+ with f:
+ self.trace("cache-write %s: %r" % (key, value,))
+ json.dump(value, f, indent=2, sort_keys=True)
+
+
+class LFPlugin:
+ """ Plugin which implements the --lf (run last-failing) option """
+ def __init__(self, config):
+ self.config = config
+ active_keys = 'lf', 'failedfirst'
+ self.active = any(config.getvalue(key) for key in active_keys)
+ if self.active:
+ self.lastfailed = config.cache.get("cache/lastfailed", {})
+ else:
+ self.lastfailed = {}
+
+ def pytest_report_header(self):
+ if self.active:
+ if not self.lastfailed:
+ mode = "run all (no recorded failures)"
+ else:
+ mode = "rerun last %d failures%s" % (
+ len(self.lastfailed),
+ " first" if self.config.getvalue("failedfirst") else "")
+ return "run-last-failure: %s" % mode
+
+ def pytest_runtest_logreport(self, report):
+ if report.failed and "xfail" not in report.keywords:
+ self.lastfailed[report.nodeid] = True
+ elif not report.failed:
+ if report.when == "call":
+ self.lastfailed.pop(report.nodeid, None)
+
+ def pytest_collectreport(self, report):
+ passed = report.outcome in ('passed', 'skipped')
+ if passed:
+ if report.nodeid in self.lastfailed:
+ self.lastfailed.pop(report.nodeid)
+ self.lastfailed.update(
+ (item.nodeid, True)
+ for item in report.result)
+ else:
+ self.lastfailed[report.nodeid] = True
+
+ def pytest_collection_modifyitems(self, session, config, items):
+ if self.active and self.lastfailed:
+ previously_failed = []
+ previously_passed = []
+ for item in items:
+ if item.nodeid in self.lastfailed:
+ previously_failed.append(item)
+ else:
+ previously_passed.append(item)
+ if not previously_failed and previously_passed:
+ # running a subset of all tests with recorded failures outside
+ # of the set of tests currently executing
+ pass
+ elif self.config.getvalue("failedfirst"):
+ items[:] = previously_failed + previously_passed
+ else:
+ items[:] = previously_failed
+ config.hook.pytest_deselected(items=previously_passed)
+
+ def pytest_sessionfinish(self, session):
+ config = self.config
+ if config.getvalue("cacheshow") or hasattr(config, "slaveinput"):
+ return
+ prev_failed = config.cache.get("cache/lastfailed", None) is not None
+ if (session.testscollected and prev_failed) or self.lastfailed:
+ config.cache.set("cache/lastfailed", self.lastfailed)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption(
+ '--lf', '--last-failed', action='store_true', dest="lf",
+ help="rerun only the tests that failed "
+ "at the last run (or all if none failed)")
+ group.addoption(
+ '--ff', '--failed-first', action='store_true', dest="failedfirst",
+ help="run all tests but run the last failures first. "
+ "This may re-order tests and thus lead to "
+ "repeated fixture setup/teardown")
+ group.addoption(
+ '--cache-show', action='store_true', dest="cacheshow",
+ help="show cache contents, don't perform collection or tests")
+ group.addoption(
+ '--cache-clear', action='store_true', dest="cacheclear",
+ help="remove all cache contents at start of test run.")
+
+
+def pytest_cmdline_main(config):
+ if config.option.cacheshow:
+ from _pytest.main import wrap_session
+ return wrap_session(config, cacheshow)
+
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_configure(config):
+ config.cache = Cache(config)
+ config.pluginmanager.register(LFPlugin(config), "lfplugin")
+
+
+@pytest.fixture
+def cache(request):
+ """
+ Return a cache object that can persist state between testing sessions.
+
+ cache.get(key, default)
+ cache.set(key, value)
+
+ Keys must be a ``/`` separated value, where the first part is usually the
+ name of your plugin or application to avoid clashes with other cache users.
+
+ Values can be any object handled by the json stdlib module.
+ """
+ return request.config.cache
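+
+
+# Editor's note: illustrative usage sketch, not part of pytest; it is
+# deliberately not named test_* so it is never collected, and the key prefix
+# "example/" is made up.
+def _cache_usage_example(cache):
+    last = cache.get("example/last-run", None)       # default on a cache miss
+    cache.set("example/last-run", {"passed": True})  # any json-serialisable value
+    return last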
+
+
+def pytest_report_header(config):
+ if config.option.verbose:
+ relpath = py.path.local().bestrelpath(config.cache._cachedir)
+ return "cachedir: %s" % relpath
+
+
+def cacheshow(config, session):
+ from pprint import pprint
+ tw = py.io.TerminalWriter()
+ tw.line("cachedir: " + str(config.cache._cachedir))
+ if not config.cache._cachedir.check():
+ tw.line("cache is empty")
+ return 0
+ dummy = object()
+ basedir = config.cache._cachedir
+ vdir = basedir.join("v")
+ tw.sep("-", "cache values")
+ for valpath in vdir.visit(lambda x: x.isfile()):
+ key = valpath.relto(vdir).replace(valpath.sep, "/")
+ val = config.cache.get(key, dummy)
+ if val is dummy:
+ tw.line("%s contains unreadable content, "
+ "will be ignored" % key)
+ else:
+ tw.line("%s contains:" % key)
+ stream = py.io.TextIO()
+ pprint(val, stream=stream)
+ for line in stream.getvalue().splitlines():
+ tw.line(" " + line)
+
+ ddir = basedir.join("d")
+ if ddir.isdir() and ddir.listdir():
+ tw.sep("-", "cache directories")
+ for p in basedir.join("d").visit():
+ #if p.check(dir=1):
+ # print("%s/" % p.relto(basedir))
+ if p.isfile():
+ key = p.relto(basedir)
+ tw.line("%s is a file of length %d" % (
+ key, p.size()))
+ return 0
diff --git a/lib/spack/external/_pytest/capture.py b/lib/spack/external/_pytest/capture.py
new file mode 100644
index 0000000000..eea81ca187
--- /dev/null
+++ b/lib/spack/external/_pytest/capture.py
@@ -0,0 +1,491 @@
+"""
+per-test stdout/stderr capturing mechanism.
+
+"""
+from __future__ import with_statement
+
+import contextlib
+import sys
+import os
+from tempfile import TemporaryFile
+
+import py
+import pytest
+
+from py.io import TextIO
+unicode = py.builtin.text
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ '--capture', action="store",
+ default="fd" if hasattr(os, "dup") else "sys",
+ metavar="method", choices=['fd', 'sys', 'no'],
+ help="per-test capturing method: one of fd|sys|no.")
+ group._addoption(
+ '-s', action="store_const", const="no", dest="capture",
+ help="shortcut for --capture=no.")
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_load_initial_conftests(early_config, parser, args):
+ _readline_workaround()
+ ns = early_config.known_args_namespace
+ pluginmanager = early_config.pluginmanager
+ capman = CaptureManager(ns.capture)
+ pluginmanager.register(capman, "capturemanager")
+
+ # make sure that capturemanager is properly reset at final shutdown
+ early_config.add_cleanup(capman.reset_capturings)
+
+ # make sure logging does not raise exceptions at the end
+ def silence_logging_at_shutdown():
+ if "logging" in sys.modules:
+ sys.modules["logging"].raiseExceptions = False
+ early_config.add_cleanup(silence_logging_at_shutdown)
+
+ # finally trigger conftest loading but while capturing (issue93)
+ capman.init_capturings()
+ outcome = yield
+ out, err = capman.suspendcapture()
+ if outcome.excinfo is not None:
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+
+class CaptureManager:
+ def __init__(self, method):
+ self._method = method
+
+ def _getcapture(self, method):
+ if method == "fd":
+ return MultiCapture(out=True, err=True, Capture=FDCapture)
+ elif method == "sys":
+ return MultiCapture(out=True, err=True, Capture=SysCapture)
+ elif method == "no":
+ return MultiCapture(out=False, err=False, in_=False)
+ else:
+ raise ValueError("unknown capturing method: %r" % method)
+
+ def init_capturings(self):
+ assert not hasattr(self, "_capturing")
+ self._capturing = self._getcapture(self._method)
+ self._capturing.start_capturing()
+
+ def reset_capturings(self):
+ cap = self.__dict__.pop("_capturing", None)
+ if cap is not None:
+ cap.pop_outerr_to_orig()
+ cap.stop_capturing()
+
+ def resumecapture(self):
+ self._capturing.resume_capturing()
+
+ def suspendcapture(self, in_=False):
+ self.deactivate_funcargs()
+ cap = getattr(self, "_capturing", None)
+ if cap is not None:
+ try:
+ outerr = cap.readouterr()
+ finally:
+ cap.suspend_capturing(in_=in_)
+ return outerr
+
+ def activate_funcargs(self, pyfuncitem):
+ capfuncarg = pyfuncitem.__dict__.pop("_capfuncarg", None)
+ if capfuncarg is not None:
+ capfuncarg._start()
+ self._capfuncarg = capfuncarg
+
+ def deactivate_funcargs(self):
+ capfuncarg = self.__dict__.pop("_capfuncarg", None)
+ if capfuncarg is not None:
+ capfuncarg.close()
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_make_collect_report(self, collector):
+ if isinstance(collector, pytest.File):
+ self.resumecapture()
+ outcome = yield
+ out, err = self.suspendcapture()
+ rep = outcome.get_result()
+ if out:
+ rep.sections.append(("Captured stdout", out))
+ if err:
+ rep.sections.append(("Captured stderr", err))
+ else:
+ yield
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_setup(self, item):
+ self.resumecapture()
+ yield
+ self.suspendcapture_item(item, "setup")
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_call(self, item):
+ self.resumecapture()
+ self.activate_funcargs(item)
+ yield
+ #self.deactivate_funcargs() called from suspendcapture()
+ self.suspendcapture_item(item, "call")
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_runtest_teardown(self, item):
+ self.resumecapture()
+ yield
+ self.suspendcapture_item(item, "teardown")
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_keyboard_interrupt(self, excinfo):
+ self.reset_capturings()
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_internalerror(self, excinfo):
+ self.reset_capturings()
+
+ def suspendcapture_item(self, item, when, in_=False):
+ out, err = self.suspendcapture(in_=in_)
+ item.add_report_section(when, "stdout", out)
+ item.add_report_section(when, "stderr", err)
+
+
+error_capsysfderror = "cannot use capsys and capfd at the same time"
+
+
+@pytest.fixture
+def capsys(request):
+ """Enable capturing of writes to sys.stdout/sys.stderr and make
+ captured output available via ``capsys.readouterr()`` method calls
+ which return a ``(out, err)`` tuple.
+ """
+ if "capfd" in request.fixturenames:
+ raise request.raiseerror(error_capsysfderror)
+ request.node._capfuncarg = c = CaptureFixture(SysCapture, request)
+ return c
+
+@pytest.fixture
+def capfd(request):
+ """Enable capturing of writes to file descriptors 1 and 2 and make
+ captured output available via ``capfd.readouterr()`` method calls
+ which return a ``(out, err)`` tuple.
+ """
+ if "capsys" in request.fixturenames:
+ request.raiseerror(error_capsysfderror)
+ if not hasattr(os, 'dup'):
+ pytest.skip("capfd funcarg needs os.dup")
+ request.node._capfuncarg = c = CaptureFixture(FDCapture, request)
+ return c
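+
+
+# Editor's note: illustrative usage sketch, not part of pytest (deliberately
+# not named test_*, so it is never collected as a test).
+def _capsys_usage_example(capsys):
+    print("hello")
+    out, err = capsys.readouterr()
+    return out == "hello\n" and err == ""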
+
+
+class CaptureFixture:
+ def __init__(self, captureclass, request):
+ self.captureclass = captureclass
+ self.request = request
+
+ def _start(self):
+ self._capture = MultiCapture(out=True, err=True, in_=False,
+ Capture=self.captureclass)
+ self._capture.start_capturing()
+
+ def close(self):
+ cap = self.__dict__.pop("_capture", None)
+ if cap is not None:
+ self._outerr = cap.pop_outerr_to_orig()
+ cap.stop_capturing()
+
+ def readouterr(self):
+ try:
+ return self._capture.readouterr()
+ except AttributeError:
+ return self._outerr
+
+ @contextlib.contextmanager
+ def disabled(self):
+ capmanager = self.request.config.pluginmanager.getplugin('capturemanager')
+ capmanager.suspendcapture_item(self.request.node, "call", in_=True)
+ try:
+ yield
+ finally:
+ capmanager.resumecapture()
+
+
+def safe_text_dupfile(f, mode, default_encoding="UTF8"):
+    """ return an open text file object that's a duplicate of f on the
+ FD-level if possible.
+ """
+ encoding = getattr(f, "encoding", None)
+ try:
+ fd = f.fileno()
+ except Exception:
+ if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"):
+ # we seem to have a text stream, let's just use it
+ return f
+ else:
+ newfd = os.dup(fd)
+ if "b" not in mode:
+ mode += "b"
+ f = os.fdopen(newfd, mode, 0) # no buffering
+ return EncodedFile(f, encoding or default_encoding)
+
+
+class EncodedFile(object):
+ errors = "strict" # possibly needed by py3 code (issue555)
+ def __init__(self, buffer, encoding):
+ self.buffer = buffer
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode(self.encoding, "replace")
+ self.buffer.write(obj)
+
+ def writelines(self, linelist):
+ data = ''.join(linelist)
+ self.write(data)
+
+ def __getattr__(self, name):
+ return getattr(object.__getattribute__(self, "buffer"), name)
+
+
+class MultiCapture(object):
+ out = err = in_ = None
+
+ def __init__(self, out=True, err=True, in_=True, Capture=None):
+ if in_:
+ self.in_ = Capture(0)
+ if out:
+ self.out = Capture(1)
+ if err:
+ self.err = Capture(2)
+
+ def start_capturing(self):
+ if self.in_:
+ self.in_.start()
+ if self.out:
+ self.out.start()
+ if self.err:
+ self.err.start()
+
+ def pop_outerr_to_orig(self):
+ """ pop current snapshot out/err capture and flush to orig streams. """
+ out, err = self.readouterr()
+ if out:
+ self.out.writeorg(out)
+ if err:
+ self.err.writeorg(err)
+ return out, err
+
+ def suspend_capturing(self, in_=False):
+ if self.out:
+ self.out.suspend()
+ if self.err:
+ self.err.suspend()
+ if in_ and self.in_:
+ self.in_.suspend()
+ self._in_suspended = True
+
+ def resume_capturing(self):
+ if self.out:
+ self.out.resume()
+ if self.err:
+ self.err.resume()
+ if hasattr(self, "_in_suspended"):
+ self.in_.resume()
+ del self._in_suspended
+
+ def stop_capturing(self):
+ """ stop capturing and reset capturing streams """
+ if hasattr(self, '_reset'):
+ raise ValueError("was already stopped")
+ self._reset = True
+ if self.out:
+ self.out.done()
+ if self.err:
+ self.err.done()
+ if self.in_:
+ self.in_.done()
+
+ def readouterr(self):
+ """ return snapshot unicode value of stdout/stderr capturings. """
+ return (self.out.snap() if self.out is not None else "",
+ self.err.snap() if self.err is not None else "")
+
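+
+# Editor's note -- illustrative sketch, not part of pytest: the typical
+# lifecycle of the class above, as driven by CaptureFixture above, is
+#
+#     cap = MultiCapture(out=True, err=True, in_=False, Capture=SysCapture)
+#     cap.start_capturing()
+#     ...                       # run code whose output should be captured
+#     out, err = cap.readouterr()
+#     cap.stop_capturing()
+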
+class NoCapture:
+ __init__ = start = done = suspend = resume = lambda *args: None
+
+class FDCapture:
+ """ Capture IO to/from a given os-level filedescriptor. """
+
+ def __init__(self, targetfd, tmpfile=None):
+ self.targetfd = targetfd
+ try:
+ self.targetfd_save = os.dup(self.targetfd)
+ except OSError:
+ self.start = lambda: None
+ self.done = lambda: None
+ else:
+ if targetfd == 0:
+ assert not tmpfile, "cannot set tmpfile with stdin"
+ tmpfile = open(os.devnull, "r")
+ self.syscapture = SysCapture(targetfd)
+ else:
+ if tmpfile is None:
+ f = TemporaryFile()
+ with f:
+ tmpfile = safe_text_dupfile(f, mode="wb+")
+ if targetfd in patchsysdict:
+ self.syscapture = SysCapture(targetfd, tmpfile)
+ else:
+ self.syscapture = NoCapture()
+ self.tmpfile = tmpfile
+ self.tmpfile_fd = tmpfile.fileno()
+
+ def __repr__(self):
+ return "<FDCapture %s oldfd=%s>" % (self.targetfd, self.targetfd_save)
+
+ def start(self):
+ """ Start capturing on targetfd using memorized tmpfile. """
+ try:
+ os.fstat(self.targetfd_save)
+ except (AttributeError, OSError):
+ raise ValueError("saved filedescriptor not valid anymore")
+ os.dup2(self.tmpfile_fd, self.targetfd)
+ self.syscapture.start()
+
+ def snap(self):
+ f = self.tmpfile
+ f.seek(0)
+ res = f.read()
+ if res:
+ enc = getattr(f, "encoding", None)
+ if enc and isinstance(res, bytes):
+ res = py.builtin._totext(res, enc, "replace")
+ f.truncate(0)
+ f.seek(0)
+ return res
+ return ''
+
+ def done(self):
+        """ stop capturing, restore the original streams and close the
+        temporary capture file. """
+ targetfd_save = self.__dict__.pop("targetfd_save")
+ os.dup2(targetfd_save, self.targetfd)
+ os.close(targetfd_save)
+ self.syscapture.done()
+ self.tmpfile.close()
+
+ def suspend(self):
+ self.syscapture.suspend()
+ os.dup2(self.targetfd_save, self.targetfd)
+
+ def resume(self):
+ self.syscapture.resume()
+ os.dup2(self.tmpfile_fd, self.targetfd)
+
+ def writeorg(self, data):
+ """ write to original file descriptor. """
+ if py.builtin._istext(data):
+ data = data.encode("utf8") # XXX use encoding of original stream
+ os.write(self.targetfd_save, data)
+
+
+class SysCapture:
+ def __init__(self, fd, tmpfile=None):
+ name = patchsysdict[fd]
+ self._old = getattr(sys, name)
+ self.name = name
+ if tmpfile is None:
+ if name == "stdin":
+ tmpfile = DontReadFromInput()
+ else:
+ tmpfile = TextIO()
+ self.tmpfile = tmpfile
+
+ def start(self):
+ setattr(sys, self.name, self.tmpfile)
+
+ def snap(self):
+ f = self.tmpfile
+ res = f.getvalue()
+ f.truncate(0)
+ f.seek(0)
+ return res
+
+ def done(self):
+ setattr(sys, self.name, self._old)
+ del self._old
+ self.tmpfile.close()
+
+ def suspend(self):
+ setattr(sys, self.name, self._old)
+
+ def resume(self):
+ setattr(sys, self.name, self.tmpfile)
+
+ def writeorg(self, data):
+ self._old.write(data)
+ self._old.flush()
+
+
+class DontReadFromInput:
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+
+ encoding = None
+
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+ readline = read
+ readlines = read
+ __iter__ = read
+
+ def fileno(self):
+ raise ValueError("redirected Stdin is pseudofile, has no fileno()")
+
+ def isatty(self):
+ return False
+
+ def close(self):
+ pass
+
+ @property
+ def buffer(self):
+ if sys.version_info >= (3,0):
+ return self
+ else:
+ raise AttributeError('redirected stdin has no attribute buffer')
+
+
+def _readline_workaround():
+ """
+ Ensure readline is imported so that it attaches to the correct stdio
+ handles on Windows.
+
+ Pdb uses readline support where available--when not running from the Python
+ prompt, the readline module is not imported until running the pdb REPL. If
+ running pytest with the --pdb option this means the readline module is not
+ imported until after I/O capture has been started.
+
+ This is a problem for pyreadline, which is often used to implement readline
+ support on Windows, as it does not attach to the correct handles for stdout
+ and/or stdin if they have been redirected by the FDCapture mechanism. This
+ workaround ensures that readline is imported before I/O capture is setup so
+ that it can attach to the actual stdin/out for the console.
+
+ See https://github.com/pytest-dev/pytest/pull/1281
+ """
+
+ if not sys.platform.startswith('win32'):
+ return
+ try:
+ import readline # noqa
+ except ImportError:
+ pass
diff --git a/lib/spack/external/_pytest/compat.py b/lib/spack/external/_pytest/compat.py
new file mode 100644
index 0000000000..51fc3bc5c1
--- /dev/null
+++ b/lib/spack/external/_pytest/compat.py
@@ -0,0 +1,230 @@
+"""
+python version compatibility code
+"""
+import sys
+import inspect
+import types
+import re
+import functools
+
+import py
+
+import _pytest
+
+
+
+try:
+ import enum
+except ImportError: # pragma: no cover
+ # Only available in Python 3.4+ or as a backport
+ enum = None
+
+_PY3 = sys.version_info > (3, 0)
+_PY2 = not _PY3
+
+
+NoneType = type(None)
+NOTSET = object()
+
+if hasattr(inspect, 'signature'):
+ def _format_args(func):
+ return str(inspect.signature(func))
+else:
+ def _format_args(func):
+ return inspect.formatargspec(*inspect.getargspec(func))
+
+isfunction = inspect.isfunction
+isclass = inspect.isclass
+# used to work around a python2 exception info leak
+exc_clear = getattr(sys, 'exc_clear', lambda: None)
+# The type of re.compile objects is not exposed in Python.
+REGEX_TYPE = type(re.compile(''))
+
+
+def is_generator(func):
+ try:
+ return _pytest._code.getrawcode(func).co_flags & 32 # generator function
+ except AttributeError: # builtin functions have no bytecode
+ # assume them to not be generators
+ return False
+
+
+def getlocation(function, curdir):
+ import inspect
+ fn = py.path.local(inspect.getfile(function))
+ lineno = py.builtin._getcode(function).co_firstlineno
+ if fn.relto(curdir):
+ fn = fn.relto(curdir)
+ return "%s:%d" %(fn, lineno+1)
+
+
+def num_mock_patch_args(function):
+ """ return number of arguments used up by mock arguments (if any) """
+ patchings = getattr(function, "patchings", None)
+ if not patchings:
+ return 0
+ mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None))
+ if mock is not None:
+ return len([p for p in patchings
+ if not p.attribute_name and p.new is mock.DEFAULT])
+ return len(patchings)
+
+
+def getfuncargnames(function, startindex=None):
+ # XXX merge with main.py's varnames
+ #assert not isclass(function)
+ realfunction = function
+ while hasattr(realfunction, "__wrapped__"):
+ realfunction = realfunction.__wrapped__
+ if startindex is None:
+ startindex = inspect.ismethod(function) and 1 or 0
+ if realfunction != function:
+ startindex += num_mock_patch_args(function)
+ function = realfunction
+ if isinstance(function, functools.partial):
+ argnames = inspect.getargs(_pytest._code.getrawcode(function.func))[0]
+ partial = function
+ argnames = argnames[len(partial.args):]
+ if partial.keywords:
+ for kw in partial.keywords:
+ argnames.remove(kw)
+ else:
+ argnames = inspect.getargs(_pytest._code.getrawcode(function))[0]
+ defaults = getattr(function, 'func_defaults',
+ getattr(function, '__defaults__', None)) or ()
+ numdefaults = len(defaults)
+ if numdefaults:
+ return tuple(argnames[startindex:-numdefaults])
+ return tuple(argnames[startindex:])
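+
+
+# Editor's note: small illustration, not part of pytest.
+def _getfuncargnames_example():
+    def probe(a, b, c=0):
+        pass
+    # trailing arguments with defaults are dropped -> ('a', 'b')
+    return getfuncargnames(probe)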
+
+
+
+if sys.version_info[:2] == (2, 6):
+ def isclass(object):
+ """ Return true if the object is a class. Overrides inspect.isclass for
+ python 2.6 because it will return True for objects which always return
+ something on __getattr__ calls (see #1035).
+ Backport of https://hg.python.org/cpython/rev/35bf8f7a8edc
+ """
+ return isinstance(object, (type, types.ClassType))
+
+
+if _PY3:
+ import codecs
+
+ STRING_TYPES = bytes, str
+
+ def _escape_strings(val):
+ """If val is pure ascii, returns it as a str(). Otherwise, escapes
+ bytes objects into a sequence of escaped bytes:
+
+ b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6'
+
+ and escapes unicode objects into a sequence of escaped unicode
+ ids, e.g.:
+
+ '4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944'
+
+ note:
+ the obvious "v.decode('unicode-escape')" will return
+ valid utf-8 unicode if it finds them in bytes, but we
+ want to return escaped bytes for any byte, even if they match
+ a utf-8 string.
+
+ """
+ if isinstance(val, bytes):
+ if val:
+ # source: http://goo.gl/bGsnwC
+ encoded_bytes, _ = codecs.escape_encode(val)
+ return encoded_bytes.decode('ascii')
+ else:
+ # empty bytes crashes codecs.escape_encode (#1087)
+ return ''
+ else:
+ return val.encode('unicode_escape').decode('ascii')
+else:
+ STRING_TYPES = bytes, str, unicode
+
+ def _escape_strings(val):
+        """In py2 bytes and str are the same type, so if it's a bytes
+        object return it unchanged when it is a full ascii string,
+        otherwise escape it into its binary form.
+
+ If it's a unicode string, change the unicode characters into
+ unicode escapes.
+
+ """
+ if isinstance(val, bytes):
+ try:
+ return val.encode('ascii')
+ except UnicodeDecodeError:
+ return val.encode('string-escape')
+ else:
+ return val.encode('unicode-escape')
+
+
+def get_real_func(obj):
+ """ gets the real function object of the (possibly) wrapped object by
+ functools.wraps or functools.partial.
+ """
+ while hasattr(obj, "__wrapped__"):
+ obj = obj.__wrapped__
+ if isinstance(obj, functools.partial):
+ obj = obj.func
+ return obj
+
+
+def getfslineno(obj):
+ # xxx let decorators etc specify a sane ordering
+ obj = get_real_func(obj)
+ if hasattr(obj, 'place_as'):
+ obj = obj.place_as
+ fslineno = _pytest._code.getfslineno(obj)
+ assert isinstance(fslineno[1], int), obj
+ return fslineno
+
+
+def getimfunc(func):
+ try:
+ return func.__func__
+ except AttributeError:
+ try:
+ return func.im_func
+ except AttributeError:
+ return func
+
+
+def safe_getattr(object, name, default):
+ """ Like getattr but return default upon any Exception.
+
+ Attribute access can potentially fail for 'evil' Python objects.
+ See issue214
+ """
+ try:
+ return getattr(object, name, default)
+ except Exception:
+ return default
+
+
+def _is_unittest_unexpected_success_a_failure():
+    """Return True if the test suite should fail when an @expectedFailure unittest test PASSES.
+
+ From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful:
+ Changed in version 3.4: Returns False if there were any
+ unexpectedSuccesses from tests marked with the expectedFailure() decorator.
+ """
+ return sys.version_info >= (3, 4)
+
+
+if _PY3:
+ def safe_str(v):
+ """returns v as string"""
+ return str(v)
+else:
+ def safe_str(v):
+ """returns v as string, converting to ascii if necessary"""
+ try:
+ return str(v)
+ except UnicodeError:
+ errors = 'replace'
+ return v.encode('ascii', errors)
diff --git a/lib/spack/external/_pytest/config.py b/lib/spack/external/_pytest/config.py
new file mode 100644
index 0000000000..fe386ed0b1
--- /dev/null
+++ b/lib/spack/external/_pytest/config.py
@@ -0,0 +1,1340 @@
+""" command line options, ini-file and conftest.py processing. """
+import argparse
+import shlex
+import traceback
+import types
+import warnings
+
+import py
+# DON'T import pytest here because it causes import cycle troubles
+import sys, os
+import _pytest._code
+import _pytest.hookspec # the extension point definitions
+import _pytest.assertion
+from _pytest._pluggy import PluginManager, HookimplMarker, HookspecMarker
+from _pytest.compat import safe_str
+
+hookimpl = HookimplMarker("pytest")
+hookspec = HookspecMarker("pytest")
+
+# pytest startup
+#
+
+
+class ConftestImportFailure(Exception):
+ def __init__(self, path, excinfo):
+ Exception.__init__(self, path, excinfo)
+ self.path = path
+ self.excinfo = excinfo
+
+ def __str__(self):
+ etype, evalue, etb = self.excinfo
+ formatted = traceback.format_tb(etb)
+ # The level of the tracebacks we want to print is hand crafted :(
+ return repr(evalue) + '\n' + ''.join(formatted[2:])
+
+
+def main(args=None, plugins=None):
+ """ return exit code, after performing an in-process test run.
+
+ :arg args: list of command line arguments.
+
+ :arg plugins: list of plugin objects to be auto-registered during
+ initialization.
+ """
+ try:
+ try:
+ config = _prepareconfig(args, plugins)
+ except ConftestImportFailure as e:
+ tw = py.io.TerminalWriter(sys.stderr)
+ for line in traceback.format_exception(*e.excinfo):
+ tw.line(line.rstrip(), red=True)
+ tw.line("ERROR: could not load %s\n" % (e.path), red=True)
+ return 4
+ else:
+ try:
+ config.pluginmanager.check_pending()
+ return config.hook.pytest_cmdline_main(config=config)
+ finally:
+ config._ensure_unconfigure()
+ except UsageError as e:
+ for msg in e.args:
+ sys.stderr.write("ERROR: %s\n" %(msg,))
+ return 4
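+
+# Editor's note: illustrative sketch, not part of pytest.  From user code the
+# function above is normally reached through the ``pytest`` namespace, e.g.
+#
+#     import pytest
+#     exit_code = pytest.main(["-x", "tests/"])
+#
+# (kept as a comment because, as noted above, pytest must not be imported in
+# this module).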
+
+class cmdline: # compatibility namespace
+ main = staticmethod(main)
+
+
+class UsageError(Exception):
+ """ error in pytest usage or invocation"""
+
+
+def filename_arg(path, optname):
+ """ Argparse type validator for filename arguments.
+
+ :path: path of filename
+ :optname: name of the option
+ """
+ if os.path.isdir(path):
+ raise UsageError("{0} must be a filename, given: {1}".format(optname, path))
+ return path
+
+
+def directory_arg(path, optname):
+ """Argparse type validator for directory arguments.
+
+ :path: path of directory
+ :optname: name of the option
+ """
+ if not os.path.isdir(path):
+ raise UsageError("{0} must be a directory, given: {1}".format(optname, path))
+ return path
+
+
+_preinit = []
+
+default_plugins = (
+ "mark main terminal runner python fixtures debugging unittest capture skipping "
+ "tmpdir monkeypatch recwarn pastebin helpconfig nose assertion "
+ "junitxml resultlog doctest cacheprovider freeze_support "
+ "setuponly setupplan").split()
+
+builtin_plugins = set(default_plugins)
+builtin_plugins.add("pytester")
+
+
+def _preloadplugins():
+ assert not _preinit
+ _preinit.append(get_config())
+
+def get_config():
+ if _preinit:
+ return _preinit.pop(0)
+ # subsequent calls to main will create a fresh instance
+ pluginmanager = PytestPluginManager()
+ config = Config(pluginmanager)
+ for spec in default_plugins:
+ pluginmanager.import_plugin(spec)
+ return config
+
+def get_plugin_manager():
+ """
+ Obtain a new instance of the
+ :py:class:`_pytest.config.PytestPluginManager`, with default plugins
+ already loaded.
+
+    This function can be used by integrations with other tools, for example
+    to hook pytest into an IDE in order to run tests from it.
+ """
+ return get_config().pluginmanager
+
+def _prepareconfig(args=None, plugins=None):
+ warning = None
+ if args is None:
+ args = sys.argv[1:]
+ elif isinstance(args, py.path.local):
+ args = [str(args)]
+ elif not isinstance(args, (tuple, list)):
+ if not isinstance(args, str):
+ raise ValueError("not a string or argument list: %r" % (args,))
+ args = shlex.split(args, posix=sys.platform != "win32")
+ from _pytest import deprecated
+ warning = deprecated.MAIN_STR_ARGS
+ config = get_config()
+ pluginmanager = config.pluginmanager
+ try:
+ if plugins:
+ for plugin in plugins:
+ if isinstance(plugin, py.builtin._basestring):
+ pluginmanager.consider_pluginarg(plugin)
+ else:
+ pluginmanager.register(plugin)
+ if warning:
+ config.warn('C1', warning)
+ return pluginmanager.hook.pytest_cmdline_parse(
+ pluginmanager=pluginmanager, args=args)
+ except BaseException:
+ config._ensure_unconfigure()
+ raise
+
+
+class PytestPluginManager(PluginManager):
+ """
+ Overwrites :py:class:`pluggy.PluginManager` to add pytest-specific
+ functionality:
+
+    * loading plugins from the command line, the ``PYTEST_PLUGINS`` env variable and
+ ``pytest_plugins`` global variables found in plugins being loaded;
+ * ``conftest.py`` loading during start-up;
+ """
+ def __init__(self):
+ super(PytestPluginManager, self).__init__("pytest", implprefix="pytest_")
+ self._conftest_plugins = set()
+
+ # state related to local conftest plugins
+ self._path2confmods = {}
+ self._conftestpath2mod = {}
+ self._confcutdir = None
+ self._noconftest = False
+ self._duplicatepaths = set()
+
+ self.add_hookspecs(_pytest.hookspec)
+ self.register(self)
+ if os.environ.get('PYTEST_DEBUG'):
+ err = sys.stderr
+ encoding = getattr(err, 'encoding', 'utf8')
+ try:
+ err = py.io.dupfile(err, encoding=encoding)
+ except Exception:
+ pass
+ self.trace.root.setwriter(err.write)
+ self.enable_tracing()
+
+ # Config._consider_importhook will set a real object if required.
+ self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
+
+ def addhooks(self, module_or_class):
+ """
+ .. deprecated:: 2.8
+
+ Use :py:meth:`pluggy.PluginManager.add_hookspecs` instead.
+ """
+ warning = dict(code="I2",
+ fslocation=_pytest._code.getfslineno(sys._getframe(1)),
+ nodeid=None,
+ message="use pluginmanager.add_hookspecs instead of "
+ "deprecated addhooks() method.")
+ self._warn(warning)
+ return self.add_hookspecs(module_or_class)
+
+ def parse_hookimpl_opts(self, plugin, name):
+ # pytest hooks are always prefixed with pytest_
+ # so we avoid accessing possibly non-readable attributes
+ # (see issue #1073)
+ if not name.startswith("pytest_"):
+ return
+        # ignore some historic special names which cannot be hooks anyway
+ if name == "pytest_plugins" or name.startswith("pytest_funcarg__"):
+ return
+
+ method = getattr(plugin, name)
+ opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name)
+ if opts is not None:
+ for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"):
+ opts.setdefault(name, hasattr(method, name))
+ return opts
+
+ def parse_hookspec_opts(self, module_or_class, name):
+ opts = super(PytestPluginManager, self).parse_hookspec_opts(
+ module_or_class, name)
+ if opts is None:
+ method = getattr(module_or_class, name)
+ if name.startswith("pytest_"):
+ opts = {"firstresult": hasattr(method, "firstresult"),
+ "historic": hasattr(method, "historic")}
+ return opts
+
+ def _verify_hook(self, hook, hookmethod):
+ super(PytestPluginManager, self)._verify_hook(hook, hookmethod)
+ if "__multicall__" in hookmethod.argnames:
+ fslineno = _pytest._code.getfslineno(hookmethod.function)
+ warning = dict(code="I1",
+ fslocation=fslineno,
+ nodeid=None,
+ message="%r hook uses deprecated __multicall__ "
+ "argument" % (hook.name))
+ self._warn(warning)
+
+ def register(self, plugin, name=None):
+ ret = super(PytestPluginManager, self).register(plugin, name)
+ if ret:
+ self.hook.pytest_plugin_registered.call_historic(
+ kwargs=dict(plugin=plugin, manager=self))
+ return ret
+
+ def getplugin(self, name):
+ # support deprecated naming because plugins (xdist e.g.) use it
+ return self.get_plugin(name)
+
+ def hasplugin(self, name):
+ """Return True if the plugin with the given name is registered."""
+ return bool(self.get_plugin(name))
+
+ def pytest_configure(self, config):
+ # XXX now that the pluginmanager exposes hookimpl(tryfirst...)
+ # we should remove tryfirst/trylast as markers
+ config.addinivalue_line("markers",
+ "tryfirst: mark a hook implementation function such that the "
+ "plugin machinery will try to call it first/as early as possible.")
+ config.addinivalue_line("markers",
+ "trylast: mark a hook implementation function such that the "
+ "plugin machinery will try to call it last/as late as possible.")
+
+ def _warn(self, message):
+ kwargs = message if isinstance(message, dict) else {
+ 'code': 'I1',
+ 'message': message,
+ 'fslocation': None,
+ 'nodeid': None,
+ }
+ self.hook.pytest_logwarning.call_historic(kwargs=kwargs)
+
+ #
+ # internal API for local conftest plugin handling
+ #
+ def _set_initial_conftests(self, namespace):
+ """ load initial conftest files given a preparsed "namespace".
+ As conftest files may add their own command line options
+ which have arguments ('--my-opt somepath') we might get some
+ false positives. All builtin and 3rd party plugins will have
+ been loaded, however, so common options will not confuse our logic
+ here.
+ """
+ current = py.path.local()
+ self._confcutdir = current.join(namespace.confcutdir, abs=True) \
+ if namespace.confcutdir else None
+ self._noconftest = namespace.noconftest
+ testpaths = namespace.file_or_dir
+ foundanchor = False
+ for path in testpaths:
+ path = str(path)
+ # remove node-id syntax
+ i = path.find("::")
+ if i != -1:
+ path = path[:i]
+ anchor = current.join(path, abs=1)
+ if exists(anchor): # we found some file object
+ self._try_load_conftest(anchor)
+ foundanchor = True
+ if not foundanchor:
+ self._try_load_conftest(current)
+
+ def _try_load_conftest(self, anchor):
+ self._getconftestmodules(anchor)
+ # let's also consider test* subdirs
+ if anchor.check(dir=1):
+ for x in anchor.listdir("test*"):
+ if x.check(dir=1):
+ self._getconftestmodules(x)
+
+ def _getconftestmodules(self, path):
+ if self._noconftest:
+ return []
+ try:
+ return self._path2confmods[path]
+ except KeyError:
+ if path.isfile():
+ clist = self._getconftestmodules(path.dirpath())
+ else:
+ # XXX these days we may rather want to use config.rootdir
+ # and allow users to opt into looking into the rootdir parent
+ # directories instead of requiring to specify confcutdir
+ clist = []
+ for parent in path.parts():
+ if self._confcutdir and self._confcutdir.relto(parent):
+ continue
+ conftestpath = parent.join("conftest.py")
+ if conftestpath.isfile():
+ mod = self._importconftest(conftestpath)
+ clist.append(mod)
+
+ self._path2confmods[path] = clist
+ return clist
+
+ def _rget_with_confmod(self, name, path):
+ modules = self._getconftestmodules(path)
+ for mod in reversed(modules):
+ try:
+ return mod, getattr(mod, name)
+ except AttributeError:
+ continue
+ raise KeyError(name)
+
+ def _importconftest(self, conftestpath):
+ try:
+ return self._conftestpath2mod[conftestpath]
+ except KeyError:
+ pkgpath = conftestpath.pypkgpath()
+ if pkgpath is None:
+ _ensure_removed_sysmodule(conftestpath.purebasename)
+ try:
+ mod = conftestpath.pyimport()
+ except Exception:
+ raise ConftestImportFailure(conftestpath, sys.exc_info())
+
+ self._conftest_plugins.add(mod)
+ self._conftestpath2mod[conftestpath] = mod
+ dirpath = conftestpath.dirpath()
+ if dirpath in self._path2confmods:
+ for path, mods in self._path2confmods.items():
+ if path and path.relto(dirpath) or path == dirpath:
+ assert mod not in mods
+ mods.append(mod)
+        self.trace("loaded conftestmodule %r" % (mod,))
+ self.consider_conftest(mod)
+ return mod
+
+ #
+ # API for bootstrapping plugin loading
+ #
+ #
+
+ def consider_preparse(self, args):
+        for opt1, opt2 in zip(args, args[1:]):
+ if opt1 == "-p":
+ self.consider_pluginarg(opt2)
+
+ def consider_pluginarg(self, arg):
+ if arg.startswith("no:"):
+ name = arg[3:]
+ self.set_blocked(name)
+ if not name.startswith("pytest_"):
+ self.set_blocked("pytest_" + name)
+ else:
+ self.import_plugin(arg)
+
+ def consider_conftest(self, conftestmodule):
+ if self.register(conftestmodule, name=conftestmodule.__file__):
+ self.consider_module(conftestmodule)
+
+ def consider_env(self):
+ self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS"))
+
+ def consider_module(self, mod):
+ plugins = getattr(mod, 'pytest_plugins', [])
+ if isinstance(plugins, str):
+ plugins = [plugins]
+ self.rewrite_hook.mark_rewrite(*plugins)
+ self._import_plugin_specs(plugins)
+
+ def _import_plugin_specs(self, spec):
+ if spec:
+ if isinstance(spec, str):
+ spec = spec.split(",")
+ for import_spec in spec:
+ self.import_plugin(import_spec)
+
+ def import_plugin(self, modname):
+ # most often modname refers to builtin modules, e.g. "pytester",
+ # "terminal" or "capture". Those plugins are registered under their
+ # basename for historic purposes but must be imported with the
+ # _pytest prefix.
+ assert isinstance(modname, str)
+ if self.get_plugin(modname) is not None:
+ return
+ if modname in builtin_plugins:
+ importspec = "_pytest." + modname
+ else:
+ importspec = modname
+ try:
+ __import__(importspec)
+ except ImportError as e:
+ new_exc = ImportError('Error importing plugin "%s": %s' % (modname, safe_str(e.args[0])))
+ # copy over name and path attributes
+ for attr in ('name', 'path'):
+ if hasattr(e, attr):
+ setattr(new_exc, attr, getattr(e, attr))
+ raise new_exc
+ except Exception as e:
+ import pytest
+ if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception):
+ raise
+            self._warn("skipped plugin %r: %s" % (modname, e.msg))
+ else:
+ mod = sys.modules[importspec]
+ self.register(mod, modname)
+ self.consider_module(mod)
+
+
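+# Editor's note: illustrative sketch, not part of upstream pytest.  It shows
+# how the bootstrapping API above maps onto familiar command line usage; the
+# plugin names are hypothetical:
+def _example_plugin_bootstrap(pluginmanager):
+    # "pytest -p myplugin" is routed through consider_preparse()/consider_pluginarg():
+    pluginmanager.consider_pluginarg("myplugin")
+    # "pytest -p no:myplugin" blocks the plugin instead of importing it:
+    pluginmanager.consider_pluginarg("no:myplugin")
+    # PYTEST_PLUGINS="pkg.plugin_a,pkg.plugin_b" is consumed by consider_env(),
+    # which forwards the comma separated spec to _import_plugin_specs():
+    pluginmanager._import_plugin_specs("pkg.plugin_a,pkg.plugin_b")
+
+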
+class Parser:
+ """ Parser for command line arguments and ini-file values.
+
+ :ivar extra_info: dict of generic param -> value to display in case
+ there's an error processing the command line arguments.
+ """
+
+ def __init__(self, usage=None, processopt=None):
+ self._anonymous = OptionGroup("custom options", parser=self)
+ self._groups = []
+ self._processopt = processopt
+ self._usage = usage
+ self._inidict = {}
+ self._ininames = []
+ self.extra_info = {}
+
+ def processoption(self, option):
+ if self._processopt:
+ if option.dest:
+ self._processopt(option)
+
+ def getgroup(self, name, description="", after=None):
+ """ get (or create) a named option Group.
+
+ :name: name of the option group.
+ :description: long description for --help output.
+ :after: name of other group, used for ordering --help output.
+
+ The returned group object has an ``addoption`` method with the same
+ signature as :py:func:`parser.addoption
+ <_pytest.config.Parser.addoption>` but will be shown in the
+        respective group in the output of ``pytest --help``.
+ """
+ for group in self._groups:
+ if group.name == name:
+ return group
+ group = OptionGroup(name, description, parser=self)
+ i = 0
+ for i, grp in enumerate(self._groups):
+ if grp.name == after:
+ break
+ self._groups.insert(i+1, group)
+ return group
+
+ def addoption(self, *opts, **attrs):
+ """ register a command line option.
+
+ :opts: option names, can be short or long options.
+        :attrs: same attributes which the ``add_argument()`` function of the
+ `argparse library
+ <http://docs.python.org/2/library/argparse.html>`_
+ accepts.
+
+        After command line parsing, options are available on the pytest config
+ object via ``config.option.NAME`` where ``NAME`` is usually set
+ by passing a ``dest`` attribute, for example
+ ``addoption("--long", dest="NAME", ...)``.
+ """
+ self._anonymous.addoption(*opts, **attrs)
+
+ def parse(self, args, namespace=None):
+ from _pytest._argcomplete import try_argcomplete
+ self.optparser = self._getparser()
+ try_argcomplete(self.optparser)
+ return self.optparser.parse_args([str(x) for x in args], namespace=namespace)
+
+ def _getparser(self):
+ from _pytest._argcomplete import filescompleter
+ optparser = MyOptionParser(self, self.extra_info)
+ groups = self._groups + [self._anonymous]
+ for group in groups:
+ if group.options:
+ desc = group.description or group.name
+ arggroup = optparser.add_argument_group(desc)
+ for option in group.options:
+ n = option.names()
+ a = option.attrs()
+ arggroup.add_argument(*n, **a)
+ # bash like autocompletion for dirs (appending '/')
+ optparser.add_argument(FILE_OR_DIR, nargs='*').completer=filescompleter
+ return optparser
+
+ def parse_setoption(self, args, option, namespace=None):
+ parsedoption = self.parse(args, namespace=namespace)
+ for name, value in parsedoption.__dict__.items():
+ setattr(option, name, value)
+ return getattr(parsedoption, FILE_OR_DIR)
+
+ def parse_known_args(self, args, namespace=None):
+ """parses and returns a namespace object with known arguments at this
+ point.
+ """
+ return self.parse_known_and_unknown_args(args, namespace=namespace)[0]
+
+ def parse_known_and_unknown_args(self, args, namespace=None):
+ """parses and returns a namespace object with known arguments, and
+ the remaining arguments unknown at this point.
+ """
+ optparser = self._getparser()
+ args = [str(x) for x in args]
+ return optparser.parse_known_args(args, namespace=namespace)
+
+ def addini(self, name, help, type=None, default=None):
+ """ register an ini-file option.
+
+ :name: name of the ini-variable
+ :type: type of the variable, can be ``pathlist``, ``args``, ``linelist``
+ or ``bool``.
+        :default: default value to use if no ini-file option exists but the value is queried.
+
+ The value of ini-variables can be retrieved via a call to
+ :py:func:`config.getini(name) <_pytest.config.Config.getini>`.
+ """
+ assert type in (None, "pathlist", "args", "linelist", "bool")
+ self._inidict[name] = (help, type, default)
+ self._ininames.append(name)
+
+
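+# Editor's note: illustrative sketch, not part of upstream pytest.  A plugin's
+# ``pytest_addoption`` hook receives a Parser like the one defined above; the
+# option and ini names used here are hypothetical:
+def _example_pytest_addoption(parser):
+    group = parser.getgroup("example", "hypothetical example options")
+    group.addoption("--example-flag", action="store_true", default=False,
+                    dest="example_flag",
+                    help="illustrative flag, read later via config.getoption()")
+    parser.addini("example_paths", "illustrative list of paths",
+                  type="pathlist", default=[])
+
+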
+class ArgumentError(Exception):
+ """
+ Raised if an Argument instance is created with invalid or
+ inconsistent arguments.
+ """
+
+ def __init__(self, msg, option):
+ self.msg = msg
+ self.option_id = str(option)
+
+ def __str__(self):
+ if self.option_id:
+ return "option %s: %s" % (self.option_id, self.msg)
+ else:
+ return self.msg
+
+
+class Argument:
+ """class that mimics the necessary behaviour of optparse.Option
+
+    It is currently a least-effort implementation that ignores
+    choices and integer prefixes:
+ https://docs.python.org/3/library/optparse.html#optparse-standard-option-types
+ """
+ _typ_map = {
+ 'int': int,
+ 'string': str,
+ 'float': float,
+ 'complex': complex,
+ }
+
+ def __init__(self, *names, **attrs):
+        """store params in private vars for use in add_argument"""
+ self._attrs = attrs
+ self._short_opts = []
+ self._long_opts = []
+ self.dest = attrs.get('dest')
+ if '%default' in (attrs.get('help') or ''):
+ warnings.warn(
+ 'pytest now uses argparse. "%default" should be'
+ ' changed to "%(default)s" ',
+ DeprecationWarning,
+ stacklevel=3)
+ try:
+ typ = attrs['type']
+ except KeyError:
+ pass
+ else:
+ # this might raise a keyerror as well, don't want to catch that
+ if isinstance(typ, py.builtin._basestring):
+ if typ == 'choice':
+ warnings.warn(
+ 'type argument to addoption() is a string %r.'
+ ' For parsearg this is optional and when supplied'
+ ' should be a type.'
+ ' (options: %s)' % (typ, names),
+ DeprecationWarning,
+ stacklevel=3)
+ # argparse expects a type here take it from
+ # the type of the first element
+ attrs['type'] = type(attrs['choices'][0])
+ else:
+ warnings.warn(
+ 'type argument to addoption() is a string %r.'
+ ' For parsearg this should be a type.'
+ ' (options: %s)' % (typ, names),
+ DeprecationWarning,
+ stacklevel=3)
+ attrs['type'] = Argument._typ_map[typ]
+ # used in test_parseopt -> test_parse_defaultgetter
+ self.type = attrs['type']
+ else:
+ self.type = typ
+ try:
+ # attribute existence is tested in Config._processopt
+ self.default = attrs['default']
+ except KeyError:
+ pass
+ self._set_opt_strings(names)
+ if not self.dest:
+ if self._long_opts:
+ self.dest = self._long_opts[0][2:].replace('-', '_')
+ else:
+ try:
+ self.dest = self._short_opts[0][1:]
+ except IndexError:
+ raise ArgumentError(
+ 'need a long or short option', self)
+
+ def names(self):
+ return self._short_opts + self._long_opts
+
+ def attrs(self):
+ # update any attributes set by processopt
+ attrs = 'default dest help'.split()
+ if self.dest:
+ attrs.append(self.dest)
+ for attr in attrs:
+ try:
+ self._attrs[attr] = getattr(self, attr)
+ except AttributeError:
+ pass
+ if self._attrs.get('help'):
+ a = self._attrs['help']
+ a = a.replace('%default', '%(default)s')
+ #a = a.replace('%prog', '%(prog)s')
+ self._attrs['help'] = a
+ return self._attrs
+
+ def _set_opt_strings(self, opts):
+ """directly from optparse
+
+ might not be necessary as this is passed to argparse later on"""
+ for opt in opts:
+ if len(opt) < 2:
+ raise ArgumentError(
+ "invalid option string %r: "
+ "must be at least two characters long" % opt, self)
+ elif len(opt) == 2:
+ if not (opt[0] == "-" and opt[1] != "-"):
+ raise ArgumentError(
+ "invalid short option string %r: "
+ "must be of the form -x, (x any non-dash char)" % opt,
+ self)
+ self._short_opts.append(opt)
+ else:
+ if not (opt[0:2] == "--" and opt[2] != "-"):
+ raise ArgumentError(
+ "invalid long option string %r: "
+ "must start with --, followed by non-dash" % opt,
+ self)
+ self._long_opts.append(opt)
+
+ def __repr__(self):
+ args = []
+ if self._short_opts:
+ args += ['_short_opts: ' + repr(self._short_opts)]
+ if self._long_opts:
+ args += ['_long_opts: ' + repr(self._long_opts)]
+ args += ['dest: ' + repr(self.dest)]
+ if hasattr(self, 'type'):
+ args += ['type: ' + repr(self.type)]
+ if hasattr(self, 'default'):
+ args += ['default: ' + repr(self.default)]
+ return 'Argument({0})'.format(', '.join(args))
+
+
+class OptionGroup:
+ def __init__(self, name, description="", parser=None):
+ self.name = name
+ self.description = description
+ self.options = []
+ self.parser = parser
+
+ def addoption(self, *optnames, **attrs):
+ """ add an option to this group.
+
+        If a shortened version of a long option is specified, it will
+        be suppressed in the help: addoption('--twowords', '--two-words')
+        results in help showing '--two-words' only, but --twowords gets
+        accepted **and** the automatic destination is in args.twowords.
+ """
+ conflict = set(optnames).intersection(
+ name for opt in self.options for name in opt.names())
+ if conflict:
+ raise ValueError("option names %s already added" % conflict)
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=False)
+
+ def _addoption(self, *optnames, **attrs):
+ option = Argument(*optnames, **attrs)
+ self._addoption_instance(option, shortupper=True)
+
+ def _addoption_instance(self, option, shortupper=False):
+ if not shortupper:
+ for opt in option._short_opts:
+ if opt[0] == '-' and opt[1].islower():
+ raise ValueError("lowercase shortoptions reserved")
+ if self.parser:
+ self.parser.processoption(option)
+ self.options.append(option)
+
+
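+# Editor's note: illustrative sketch, not part of upstream pytest.  It
+# demonstrates the "shortened long option" behaviour documented in
+# OptionGroup.addoption; the option names are hypothetical:
+def _example_two_spellings(group):
+    # --help will show only '--two-words', yet '--twowords' is accepted too,
+    # and the parsed value ends up in config.option.twowords
+    group.addoption("--twowords", "--two-words", action="store_true",
+                    default=False,
+                    help="illustrative option registered under two spellings")
+
+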
+class MyOptionParser(argparse.ArgumentParser):
+ def __init__(self, parser, extra_info=None):
+ if not extra_info:
+ extra_info = {}
+ self._parser = parser
+ argparse.ArgumentParser.__init__(self, usage=parser._usage,
+ add_help=False, formatter_class=DropShorterLongHelpFormatter)
+ # extra_info is a dict of (param -> value) to display if there's
+        # a usage error, to provide more contextual information to the user
+ self.extra_info = extra_info
+
+ def parse_args(self, args=None, namespace=None):
+ """allow splitting of positional arguments"""
+ args, argv = self.parse_known_args(args, namespace)
+ if argv:
+ for arg in argv:
+ if arg and arg[0] == '-':
+ lines = ['unrecognized arguments: %s' % (' '.join(argv))]
+ for k, v in sorted(self.extra_info.items()):
+ lines.append(' %s: %s' % (k, v))
+ self.error('\n'.join(lines))
+ getattr(args, FILE_OR_DIR).extend(argv)
+ return args
+
+
+class DropShorterLongHelpFormatter(argparse.HelpFormatter):
+ """shorten help for long options that differ only in extra hyphens
+
+ - collapse **long** options that are the same except for extra hyphens
+    - special action attribute map_long_option allows suppressing additional
+ long options
+ - shortcut if there are only two options and one of them is a short one
+ - cache result on action object as this is called at least 2 times
+ """
+ def _format_action_invocation(self, action):
+ orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
+ if orgstr and orgstr[0] != '-': # only optional arguments
+ return orgstr
+ res = getattr(action, '_formatted_action_invocation', None)
+ if res:
+ return res
+ options = orgstr.split(', ')
+ if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
+ # a shortcut for '-h, --help' or '--abc', '-a'
+ action._formatted_action_invocation = orgstr
+ return orgstr
+ return_list = []
+ option_map = getattr(action, 'map_long_option', {})
+ if option_map is None:
+ option_map = {}
+ short_long = {}
+ for option in options:
+ if len(option) == 2 or option[2] == ' ':
+ continue
+ if not option.startswith('--'):
+ raise ArgumentError('long optional argument without "--": [%s]'
+ % (option), self)
+ xxoption = option[2:]
+ if xxoption.split()[0] not in option_map:
+ shortened = xxoption.replace('-', '')
+ if shortened not in short_long or \
+ len(short_long[shortened]) < len(xxoption):
+ short_long[shortened] = xxoption
+ # now short_long has been filled out to the longest with dashes
+ # **and** we keep the right option ordering from add_argument
+        for option in options:
+ if len(option) == 2 or option[2] == ' ':
+ return_list.append(option)
+ if option[2:] == short_long.get(option.replace('-', '')):
+ return_list.append(option.replace(' ', '=', 1))
+ action._formatted_action_invocation = ', '.join(return_list)
+ return action._formatted_action_invocation
+
+
+
+def _ensure_removed_sysmodule(modname):
+ try:
+ del sys.modules[modname]
+ except KeyError:
+ pass
+
+class CmdOptions(object):
+ """ holds cmdline options as attributes."""
+ def __init__(self, values=()):
+ self.__dict__.update(values)
+ def __repr__(self):
+ return "<CmdOptions %r>" %(self.__dict__,)
+ def copy(self):
+ return CmdOptions(self.__dict__)
+
+class Notset:
+ def __repr__(self):
+ return "<NOTSET>"
+
+
+notset = Notset()
+FILE_OR_DIR = 'file_or_dir'
+
+
+class Config(object):
+ """ access to configuration values, pluginmanager and plugin hooks. """
+
+ def __init__(self, pluginmanager):
+ #: access to command line option as attributes.
+ #: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
+ self.option = CmdOptions()
+ _a = FILE_OR_DIR
+ self._parser = Parser(
+ usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
+ processopt=self._processopt,
+ )
+ #: a pluginmanager instance
+ self.pluginmanager = pluginmanager
+ self.trace = self.pluginmanager.trace.root.get("config")
+ self.hook = self.pluginmanager.hook
+ self._inicache = {}
+ self._opt2dest = {}
+ self._cleanup = []
+ self._warn = self.pluginmanager._warn
+ self.pluginmanager.register(self, "pytestconfig")
+ self._configured = False
+
+ def do_setns(dic):
+ import pytest
+ setns(pytest, dic)
+
+ self.hook.pytest_namespace.call_historic(do_setns, {})
+ self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
+
+ def add_cleanup(self, func):
+ """ Add a function to be called when the config object gets out of
+        use (usually coinciding with pytest_unconfigure)."""
+ self._cleanup.append(func)
+
+ def _do_configure(self):
+ assert not self._configured
+ self._configured = True
+ self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
+
+ def _ensure_unconfigure(self):
+ if self._configured:
+ self._configured = False
+ self.hook.pytest_unconfigure(config=self)
+ self.hook.pytest_configure._call_history = []
+ while self._cleanup:
+ fin = self._cleanup.pop()
+ fin()
+
+ def warn(self, code, message, fslocation=None):
+ """ generate a warning for this test session. """
+ self.hook.pytest_logwarning.call_historic(kwargs=dict(
+ code=code, message=message,
+ fslocation=fslocation, nodeid=None))
+
+ def get_terminal_writer(self):
+ return self.pluginmanager.get_plugin("terminalreporter")._tw
+
+ def pytest_cmdline_parse(self, pluginmanager, args):
+ # REF1 assert self == pluginmanager.config, (self, pluginmanager.config)
+ self.parse(args)
+ return self
+
+ def notify_exception(self, excinfo, option=None):
+ if option and option.fulltrace:
+ style = "long"
+ else:
+ style = "native"
+ excrepr = excinfo.getrepr(funcargs=True,
+ showlocals=getattr(option, 'showlocals', False),
+ style=style,
+ )
+ res = self.hook.pytest_internalerror(excrepr=excrepr,
+ excinfo=excinfo)
+ if not py.builtin.any(res):
+ for line in str(excrepr).split("\n"):
+ sys.stderr.write("INTERNALERROR> %s\n" %line)
+ sys.stderr.flush()
+
+ def cwd_relative_nodeid(self, nodeid):
+        # nodeids are relative to the rootdir; compute them relative to the cwd
+ if self.invocation_dir != self.rootdir:
+ fullpath = self.rootdir.join(nodeid)
+ nodeid = self.invocation_dir.bestrelpath(fullpath)
+ return nodeid
+
+ @classmethod
+ def fromdictargs(cls, option_dict, args):
+ """ constructor useable for subprocesses. """
+ config = get_config()
+ config.option.__dict__.update(option_dict)
+ config.parse(args, addopts=False)
+ for x in config.option.plugins:
+ config.pluginmanager.consider_pluginarg(x)
+ return config
+
+ def _processopt(self, opt):
+ for name in opt._short_opts + opt._long_opts:
+ self._opt2dest[name] = opt.dest
+
+ if hasattr(opt, 'default') and opt.dest:
+ if not hasattr(self.option, opt.dest):
+ setattr(self.option, opt.dest, opt.default)
+
+ @hookimpl(trylast=True)
+ def pytest_load_initial_conftests(self, early_config):
+ self.pluginmanager._set_initial_conftests(early_config.known_args_namespace)
+
+ def _initini(self, args):
+ ns, unknown_args = self._parser.parse_known_and_unknown_args(args, namespace=self.option.copy())
+ r = determine_setup(ns.inifilename, ns.file_or_dir + unknown_args, warnfunc=self.warn)
+ self.rootdir, self.inifile, self.inicfg = r
+ self._parser.extra_info['rootdir'] = self.rootdir
+ self._parser.extra_info['inifile'] = self.inifile
+ self.invocation_dir = py.path.local()
+ self._parser.addini('addopts', 'extra command line options', 'args')
+ self._parser.addini('minversion', 'minimally required pytest version')
+
+ def _consider_importhook(self, args, entrypoint_name):
+ """Install the PEP 302 import hook if using assertion re-writing.
+
+ Needs to parse the --assert=<mode> option from the commandline
+ and find all the installed plugins to mark them for re-writing
+ by the importhook.
+ """
+ ns, unknown_args = self._parser.parse_known_and_unknown_args(args)
+ mode = ns.assertmode
+ if mode == 'rewrite':
+ try:
+ hook = _pytest.assertion.install_importhook(self)
+ except SystemError:
+ mode = 'plain'
+ else:
+ import pkg_resources
+ self.pluginmanager.rewrite_hook = hook
+ for entrypoint in pkg_resources.iter_entry_points('pytest11'):
+ # 'RECORD' available for plugins installed normally (pip install)
+ # 'SOURCES.txt' available for plugins installed in dev mode (pip install -e)
+ # for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa
+ # so it shouldn't be an issue
+ for metadata in ('RECORD', 'SOURCES.txt'):
+ for entry in entrypoint.dist._get_metadata(metadata):
+ fn = entry.split(',')[0]
+ is_simple_module = os.sep not in fn and fn.endswith('.py')
+ is_package = fn.count(os.sep) == 1 and fn.endswith('__init__.py')
+ if is_simple_module:
+ module_name, ext = os.path.splitext(fn)
+ hook.mark_rewrite(module_name)
+ elif is_package:
+ package_name = os.path.dirname(fn)
+ hook.mark_rewrite(package_name)
+ self._warn_about_missing_assertion(mode)
+
+ def _warn_about_missing_assertion(self, mode):
+ try:
+ assert False
+ except AssertionError:
+ pass
+ else:
+ if mode == 'plain':
+ sys.stderr.write("WARNING: ASSERTIONS ARE NOT EXECUTED"
+ " and FAILING TESTS WILL PASS. Are you"
+ " using python -O?")
+ else:
+ sys.stderr.write("WARNING: assertions not in test modules or"
+ " plugins will be ignored"
+ " because assert statements are not executed "
+ "by the underlying Python interpreter "
+ "(are you using python -O?)\n")
+
+ def _preparse(self, args, addopts=True):
+ self._initini(args)
+ if addopts:
+ args[:] = shlex.split(os.environ.get('PYTEST_ADDOPTS', '')) + args
+ args[:] = self.getini("addopts") + args
+ self._checkversion()
+ entrypoint_name = 'pytest11'
+ self._consider_importhook(args, entrypoint_name)
+ self.pluginmanager.consider_preparse(args)
+ self.pluginmanager.load_setuptools_entrypoints(entrypoint_name)
+ self.pluginmanager.consider_env()
+ self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy())
+ confcutdir = self.known_args_namespace.confcutdir
+ if self.known_args_namespace.confcutdir is None and self.inifile:
+ confcutdir = py.path.local(self.inifile).dirname
+ self.known_args_namespace.confcutdir = confcutdir
+ try:
+ self.hook.pytest_load_initial_conftests(early_config=self,
+ args=args, parser=self._parser)
+ except ConftestImportFailure:
+ e = sys.exc_info()[1]
+ if ns.help or ns.version:
+                # we don't want to prevent --help/--version from working,
+                # so just let it pass and print a warning at the end
+ self._warn("could not load initial conftests (%s)\n" % e.path)
+ else:
+ raise
+
+ def _checkversion(self):
+ import pytest
+ minver = self.inicfg.get('minversion', None)
+ if minver:
+ ver = minver.split(".")
+ myver = pytest.__version__.split(".")
+ if myver < ver:
+ raise pytest.UsageError(
+                "%s:%d: requires pytest-%s, actual pytest-%s" % (
+ self.inicfg.config.path, self.inicfg.lineof('minversion'),
+ minver, pytest.__version__))
+
+ def parse(self, args, addopts=True):
+ # parse given cmdline arguments into this config object.
+ assert not hasattr(self, 'args'), (
+ "can only parse cmdline args at most once per Config object")
+ self._origargs = args
+ self.hook.pytest_addhooks.call_historic(
+ kwargs=dict(pluginmanager=self.pluginmanager))
+ self._preparse(args, addopts=addopts)
+ # XXX deprecated hook:
+ self.hook.pytest_cmdline_preparse(config=self, args=args)
+ args = self._parser.parse_setoption(args, self.option, namespace=self.option)
+ if not args:
+ cwd = os.getcwd()
+ if cwd == self.rootdir:
+ args = self.getini('testpaths')
+ if not args:
+ args = [cwd]
+ self.args = args
+
+ def addinivalue_line(self, name, line):
+ """ add a line to an ini-file option. The option must have been
+        declared but might not yet be set, in which case the line becomes
+        the first line in its value. """
+ x = self.getini(name)
+ assert isinstance(x, list)
+ x.append(line) # modifies the cached list inline
+
+ def getini(self, name):
+ """ return configuration value from an :ref:`ini file <inifiles>`. If the
+ specified name hasn't been registered through a prior
+ :py:func:`parser.addini <pytest.config.Parser.addini>`
+ call (usually from a plugin), a ValueError is raised. """
+ try:
+ return self._inicache[name]
+ except KeyError:
+ self._inicache[name] = val = self._getini(name)
+ return val
+
+ def _getini(self, name):
+ try:
+ description, type, default = self._parser._inidict[name]
+ except KeyError:
+ raise ValueError("unknown configuration value: %r" %(name,))
+ value = self._get_override_ini_value(name)
+ if value is None:
+ try:
+ value = self.inicfg[name]
+ except KeyError:
+ if default is not None:
+ return default
+ if type is None:
+ return ''
+ return []
+ if type == "pathlist":
+ dp = py.path.local(self.inicfg.config.path).dirpath()
+ l = []
+ for relpath in shlex.split(value):
+ l.append(dp.join(relpath, abs=True))
+ return l
+ elif type == "args":
+ return shlex.split(value)
+ elif type == "linelist":
+ return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
+ elif type == "bool":
+ return bool(_strtobool(value.strip()))
+ else:
+ assert type is None
+ return value
+
+ def _getconftest_pathlist(self, name, path):
+ try:
+ mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
+ except KeyError:
+ return None
+ modpath = py.path.local(mod.__file__).dirpath()
+ l = []
+ for relroot in relroots:
+ if not isinstance(relroot, py.path.local):
+ relroot = relroot.replace("/", py.path.local.sep)
+ relroot = modpath.join(relroot, abs=True)
+ l.append(relroot)
+ return l
+
+ def _get_override_ini_value(self, name):
+ value = None
+        # override_ini is a list of lists, to support both
+        # "-o foo1=bar1 foo2=bar2" and "-o foo1=bar1 -o foo2=bar2" options
+ # always use the last item if multiple value set for same ini-name,
+ # e.g. -o foo=bar1 -o foo=bar2 will set foo to bar2
+ if self.getoption("override_ini", None):
+ for ini_config_list in self.option.override_ini:
+ for ini_config in ini_config_list:
+ try:
+ (key, user_ini_value) = ini_config.split("=", 1)
+ except ValueError:
+ raise UsageError("-o/--override-ini expects option=value style.")
+ if key == name:
+ value = user_ini_value
+ return value
+
+ def getoption(self, name, default=notset, skip=False):
+ """ return command line option value.
+
+ :arg name: name of the option. You may also specify
+ the literal ``--OPT`` option instead of the "dest" option name.
+ :arg default: default value if no option of that name exists.
+        :arg skip: if True, raise pytest.skip if the option does not exist
+ or has a None value.
+ """
+ name = self._opt2dest.get(name, name)
+ try:
+ val = getattr(self.option, name)
+ if val is None and skip:
+ raise AttributeError(name)
+ return val
+ except AttributeError:
+ if default is not notset:
+ return default
+ if skip:
+ import pytest
+ pytest.skip("no %r option found" %(name,))
+ raise ValueError("no option named %r" % (name,))
+
+ def getvalue(self, name, path=None):
+ """ (deprecated, use getoption()) """
+ return self.getoption(name)
+
+ def getvalueorskip(self, name, path=None):
+ """ (deprecated, use getoption(skip=True)) """
+ return self.getoption(name, skip=True)
+
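+# Editor's note: illustrative sketch, not part of upstream pytest.  It shows
+# how a plugin's ``pytest_configure`` hook typically consumes the Config API
+# above; the option and ini names are hypothetical and match the sketch that
+# follows class Parser:
+def _example_pytest_configure(config):
+    config.addinivalue_line(
+        "markers", "example: tests exercised by the hypothetical --example-flag")
+    if config.getoption("example_flag", default=False):
+        # values registered with parser.addini() are read back with getini()
+        return config.getini("example_paths")
+
+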
+def exists(path, ignore=EnvironmentError):
+ try:
+ return path.check()
+ except ignore:
+ return False
+
+def getcfg(args, warnfunc=None):
+ """
+ Search the list of arguments for a valid ini-file for pytest,
+ and return a tuple of (rootdir, inifile, cfg-dict).
+
+ note: warnfunc is an optional function used to warn
+ about ini-files that use deprecated features.
+ This parameter should be removed when pytest
+ adopts standard deprecation warnings (#1804).
+ """
+ from _pytest.deprecated import SETUP_CFG_PYTEST
+ inibasenames = ["pytest.ini", "tox.ini", "setup.cfg"]
+ args = [x for x in args if not str(x).startswith("-")]
+ if not args:
+ args = [py.path.local()]
+ for arg in args:
+ arg = py.path.local(arg)
+ for base in arg.parts(reverse=True):
+ for inibasename in inibasenames:
+ p = base.join(inibasename)
+ if exists(p):
+ iniconfig = py.iniconfig.IniConfig(p)
+ if 'pytest' in iniconfig.sections:
+ if inibasename == 'setup.cfg' and warnfunc:
+ warnfunc('C1', SETUP_CFG_PYTEST)
+ return base, p, iniconfig['pytest']
+ if inibasename == 'setup.cfg' and 'tool:pytest' in iniconfig.sections:
+ return base, p, iniconfig['tool:pytest']
+ elif inibasename == "pytest.ini":
+ # allowed to be empty
+ return base, p, {}
+ return None, None, None
+
+
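+# Editor's note: illustrative comment, not part of upstream pytest.  getcfg()
+# above searches upwards from the given arguments and picks the first of
+# pytest.ini, tox.ini or setup.cfg that carries a pytest section, e.g.::
+#
+#     # pytest.ini / tox.ini (the section may even be empty in pytest.ini)
+#     [pytest]
+#     addopts = -q
+#
+#     # setup.cfg uses the namespaced section instead
+#     [tool:pytest]
+#     addopts = -q
+
+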
+def get_common_ancestor(args):
+ # args are what we get after early command line parsing (usually
+ # strings, but can be py.path.local objects as well)
+ common_ancestor = None
+ for arg in args:
+ if str(arg)[0] == "-":
+ continue
+ p = py.path.local(arg)
+ if not p.exists():
+ continue
+ if common_ancestor is None:
+ common_ancestor = p
+ else:
+ if p.relto(common_ancestor) or p == common_ancestor:
+ continue
+ elif common_ancestor.relto(p):
+ common_ancestor = p
+ else:
+ shared = p.common(common_ancestor)
+ if shared is not None:
+ common_ancestor = shared
+ if common_ancestor is None:
+ common_ancestor = py.path.local()
+ elif common_ancestor.isfile():
+ common_ancestor = common_ancestor.dirpath()
+ return common_ancestor
+
+
+def get_dirs_from_args(args):
+ return [d for d in (py.path.local(x) for x in args
+ if not str(x).startswith("-"))
+ if d.exists()]
+
+
+def determine_setup(inifile, args, warnfunc=None):
+ dirs = get_dirs_from_args(args)
+ if inifile:
+ iniconfig = py.iniconfig.IniConfig(inifile)
+ try:
+ inicfg = iniconfig["pytest"]
+ except KeyError:
+ inicfg = None
+ rootdir = get_common_ancestor(dirs)
+ else:
+ ancestor = get_common_ancestor(dirs)
+ rootdir, inifile, inicfg = getcfg([ancestor], warnfunc=warnfunc)
+ if rootdir is None:
+ for rootdir in ancestor.parts(reverse=True):
+ if rootdir.join("setup.py").exists():
+ break
+ else:
+ rootdir, inifile, inicfg = getcfg(dirs, warnfunc=warnfunc)
+ if rootdir is None:
+ rootdir = get_common_ancestor([py.path.local(), ancestor])
+ is_fs_root = os.path.splitdrive(str(rootdir))[1] == os.sep
+ if is_fs_root:
+ rootdir = ancestor
+ return rootdir, inifile, inicfg or {}
+
+
+def setns(obj, dic):
+ import pytest
+ for name, value in dic.items():
+ if isinstance(value, dict):
+ mod = getattr(obj, name, None)
+ if mod is None:
+ modname = "pytest.%s" % name
+ mod = types.ModuleType(modname)
+ sys.modules[modname] = mod
+ mod.__all__ = []
+ setattr(obj, name, mod)
+ obj.__all__.append(name)
+ setns(mod, value)
+ else:
+ setattr(obj, name, value)
+ obj.__all__.append(name)
+ #if obj != pytest:
+ # pytest.__all__.append(name)
+ setattr(pytest, name, value)
+
+
+def create_terminal_writer(config, *args, **kwargs):
+ """Create a TerminalWriter instance configured according to the options
+ in the config object. Every code which requires a TerminalWriter object
+ and has access to a config object should use this function.
+ """
+ tw = py.io.TerminalWriter(*args, **kwargs)
+ if config.option.color == 'yes':
+ tw.hasmarkup = True
+ if config.option.color == 'no':
+ tw.hasmarkup = False
+ return tw
+
+
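+# Editor's note: illustrative sketch, not part of upstream pytest.  Code that
+# wants its output to honour --color should build its writer through the
+# helper above instead of instantiating py.io.TerminalWriter directly:
+def _example_status_line(config, text):
+    tw = create_terminal_writer(config)
+    tw.line(text, green=True)  # the markup is ignored when --color=no is set
+
+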
+def _strtobool(val):
+ """Convert a string representation of truth to true (1) or false (0).
+
+ True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
+ are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
+ 'val' is anything else.
+
+ .. note:: copied from distutils.util
+ """
+ val = val.lower()
+ if val in ('y', 'yes', 't', 'true', 'on', '1'):
+ return 1
+ elif val in ('n', 'no', 'f', 'false', 'off', '0'):
+ return 0
+ else:
+ raise ValueError("invalid truth value %r" % (val,))
diff --git a/lib/spack/external/_pytest/debugging.py b/lib/spack/external/_pytest/debugging.py
new file mode 100644
index 0000000000..d96170bd8b
--- /dev/null
+++ b/lib/spack/external/_pytest/debugging.py
@@ -0,0 +1,124 @@
+""" interactive debugging with PDB, the Python Debugger. """
+from __future__ import absolute_import
+import pdb
+import sys
+
+import pytest
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ '--pdb', dest="usepdb", action="store_true",
+ help="start the interactive Python debugger on errors.")
+ group._addoption(
+ '--pdbcls', dest="usepdb_cls", metavar="modulename:classname",
+ help="start a custom interactive Python debugger on errors. "
+ "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb")
+
+def pytest_namespace():
+ return {'set_trace': pytestPDB().set_trace}
+
+def pytest_configure(config):
+ if config.getvalue("usepdb") or config.getvalue("usepdb_cls"):
+ config.pluginmanager.register(PdbInvoke(), 'pdbinvoke')
+ if config.getvalue("usepdb_cls"):
+ modname, classname = config.getvalue("usepdb_cls").split(":")
+ __import__(modname)
+ pdb_cls = getattr(sys.modules[modname], classname)
+ else:
+ pdb_cls = pdb.Pdb
+ pytestPDB._pdb_cls = pdb_cls
+
+ old = (pdb.set_trace, pytestPDB._pluginmanager)
+
+ def fin():
+ pdb.set_trace, pytestPDB._pluginmanager = old
+ pytestPDB._config = None
+ pytestPDB._pdb_cls = pdb.Pdb
+
+ pdb.set_trace = pytest.set_trace
+ pytestPDB._pluginmanager = config.pluginmanager
+ pytestPDB._config = config
+ config._cleanup.append(fin)
+
+class pytestPDB:
+ """ Pseudo PDB that defers to the real pdb. """
+ _pluginmanager = None
+ _config = None
+ _pdb_cls = pdb.Pdb
+
+ def set_trace(self):
+ """ invoke PDB set_trace debugging, dropping any IO capturing. """
+ import _pytest.config
+ frame = sys._getframe().f_back
+ if self._pluginmanager is not None:
+ capman = self._pluginmanager.getplugin("capturemanager")
+ if capman:
+ capman.suspendcapture(in_=True)
+ tw = _pytest.config.create_terminal_writer(self._config)
+ tw.line()
+ tw.sep(">", "PDB set_trace (IO-capturing turned off)")
+ self._pluginmanager.hook.pytest_enter_pdb(config=self._config)
+ self._pdb_cls().set_trace(frame)
+
+
+class PdbInvoke:
+ def pytest_exception_interact(self, node, call, report):
+ capman = node.config.pluginmanager.getplugin("capturemanager")
+ if capman:
+ out, err = capman.suspendcapture(in_=True)
+ sys.stdout.write(out)
+ sys.stdout.write(err)
+ _enter_pdb(node, call.excinfo, report)
+
+ def pytest_internalerror(self, excrepr, excinfo):
+ for line in str(excrepr).split("\n"):
+ sys.stderr.write("INTERNALERROR> %s\n" %line)
+ sys.stderr.flush()
+ tb = _postmortem_traceback(excinfo)
+ post_mortem(tb)
+
+
+def _enter_pdb(node, excinfo, rep):
+ # XXX we re-use the TerminalReporter's terminalwriter
+ # because this seems to avoid some encoding related troubles
+ # for not completely clear reasons.
+ tw = node.config.pluginmanager.getplugin("terminalreporter")._tw
+ tw.line()
+ tw.sep(">", "traceback")
+ rep.toterminal(tw)
+ tw.sep(">", "entering PDB")
+ tb = _postmortem_traceback(excinfo)
+ post_mortem(tb)
+ rep._pdbshown = True
+ return rep
+
+
+def _postmortem_traceback(excinfo):
+ # A doctest.UnexpectedException is not useful for post_mortem.
+ # Use the underlying exception instead:
+ from doctest import UnexpectedException
+ if isinstance(excinfo.value, UnexpectedException):
+ return excinfo.value.exc_info[2]
+ else:
+ return excinfo._excinfo[2]
+
+
+def _find_last_non_hidden_frame(stack):
+ i = max(0, len(stack) - 1)
+ while i and stack[i][0].f_locals.get("__tracebackhide__", False):
+ i -= 1
+ return i
+
+
+def post_mortem(t):
+ class Pdb(pytestPDB._pdb_cls):
+ def get_stack(self, f, t):
+ stack, i = pdb.Pdb.get_stack(self, f, t)
+ if f is None:
+ i = _find_last_non_hidden_frame(stack)
+ return stack, i
+ p = Pdb()
+ p.reset()
+ p.interaction(None, t)
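+
+
+# Editor's note: illustrative sketch, not part of upstream pytest.  With the
+# plugin above a test can drop into the capture-aware debugger through the
+# name installed by pytest_namespace():
+def _example_explicit_breakpoint():
+    import pytest
+    value = 21 * 2          # stand-in for the code actually under inspection
+    pytest.set_trace()      # like pdb.set_trace(), but suspends IO capturing
+    return value
+
+
+# From the command line the same machinery is reached with ``pytest --pdb``
+# (enter pdb on failures and errors) or with a custom debugger class, e.g.
+# ``--pdbcls=IPython.terminal.debugger:TerminalPdb``.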
diff --git a/lib/spack/external/_pytest/deprecated.py b/lib/spack/external/_pytest/deprecated.py
new file mode 100644
index 0000000000..6edc475f6e
--- /dev/null
+++ b/lib/spack/external/_pytest/deprecated.py
@@ -0,0 +1,24 @@
+"""
+This module contains deprecation messages and bits of code used elsewhere in the codebase
+that are planned to be removed in the next pytest release.
+
+Keeping it in a central location makes it easy to track what is deprecated and should
+be removed when the time comes.
+"""
+
+
+MAIN_STR_ARGS = 'passing a string to pytest.main() is deprecated, ' \
+ 'pass a list of arguments instead.'
+
+YIELD_TESTS = 'yield tests are deprecated, and scheduled to be removed in pytest 4.0'
+
+FUNCARG_PREFIX = (
+ '{name}: declaring fixtures using "pytest_funcarg__" prefix is deprecated '
+ 'and scheduled to be removed in pytest 4.0. '
+ 'Please remove the prefix and use the @pytest.fixture decorator instead.')
+
+SETUP_CFG_PYTEST = '[pytest] section in setup.cfg files is deprecated, use [tool:pytest] instead.'
+
+GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue"
+
+RESULT_LOG = '--result-log is deprecated and scheduled for removal in pytest 4.0'
diff --git a/lib/spack/external/_pytest/doctest.py b/lib/spack/external/_pytest/doctest.py
new file mode 100644
index 0000000000..f4782dded5
--- /dev/null
+++ b/lib/spack/external/_pytest/doctest.py
@@ -0,0 +1,331 @@
+""" discover and run doctests in modules and test files."""
+from __future__ import absolute_import
+
+import traceback
+
+import pytest
+from _pytest._code.code import ExceptionInfo, ReprFileLocation, TerminalRepr
+from _pytest.fixtures import FixtureRequest
+
+
+DOCTEST_REPORT_CHOICE_NONE = 'none'
+DOCTEST_REPORT_CHOICE_CDIFF = 'cdiff'
+DOCTEST_REPORT_CHOICE_NDIFF = 'ndiff'
+DOCTEST_REPORT_CHOICE_UDIFF = 'udiff'
+DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = 'only_first_failure'
+
+DOCTEST_REPORT_CHOICES = (
+ DOCTEST_REPORT_CHOICE_NONE,
+ DOCTEST_REPORT_CHOICE_CDIFF,
+ DOCTEST_REPORT_CHOICE_NDIFF,
+ DOCTEST_REPORT_CHOICE_UDIFF,
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE,
+)
+
+def pytest_addoption(parser):
+ parser.addini('doctest_optionflags', 'option flags for doctests',
+ type="args", default=["ELLIPSIS"])
+ group = parser.getgroup("collect")
+ group.addoption("--doctest-modules",
+ action="store_true", default=False,
+ help="run doctests in all .py modules",
+ dest="doctestmodules")
+ group.addoption("--doctest-report",
+ type=str.lower, default="udiff",
+ help="choose another output format for diffs on doctest failure",
+ choices=DOCTEST_REPORT_CHOICES,
+ dest="doctestreport")
+ group.addoption("--doctest-glob",
+ action="append", default=[], metavar="pat",
+ help="doctests file matching pattern, default: test*.txt",
+ dest="doctestglob")
+ group.addoption("--doctest-ignore-import-errors",
+ action="store_true", default=False,
+ help="ignore doctest ImportErrors",
+ dest="doctest_ignore_import_errors")
+
+
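+# Editor's note: illustrative comment, not part of upstream pytest.  The
+# options registered above correspond to the usual invocations::
+#
+#     pytest --doctest-modules              # collect doctests from .py modules
+#     pytest --doctest-glob='*.rst'         # collect text-file doctests
+#
+# and to the ini option, e.g. in pytest.ini::
+#
+#     [pytest]
+#     doctest_optionflags = ELLIPSIS NORMALIZE_WHITESPACE
+
+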
+def pytest_collect_file(path, parent):
+ config = parent.config
+ if path.ext == ".py":
+ if config.option.doctestmodules:
+ return DoctestModule(path, parent)
+ elif _is_doctest(config, path, parent):
+ return DoctestTextfile(path, parent)
+
+
+def _is_doctest(config, path, parent):
+ if path.ext in ('.txt', '.rst') and parent.session.isinitpath(path):
+ return True
+ globs = config.getoption("doctestglob") or ['test*.txt']
+ for glob in globs:
+ if path.check(fnmatch=glob):
+ return True
+ return False
+
+
+class ReprFailDoctest(TerminalRepr):
+
+ def __init__(self, reprlocation, lines):
+ self.reprlocation = reprlocation
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+ self.reprlocation.toterminal(tw)
+
+
+class DoctestItem(pytest.Item):
+ def __init__(self, name, parent, runner=None, dtest=None):
+ super(DoctestItem, self).__init__(name, parent)
+ self.runner = runner
+ self.dtest = dtest
+ self.obj = None
+ self.fixture_request = None
+
+ def setup(self):
+ if self.dtest is not None:
+ self.fixture_request = _setup_fixtures(self)
+ globs = dict(getfixture=self.fixture_request.getfixturevalue)
+ for name, value in self.fixture_request.getfixturevalue('doctest_namespace').items():
+ globs[name] = value
+ self.dtest.globs.update(globs)
+
+ def runtest(self):
+ _check_all_skipped(self.dtest)
+ self.runner.run(self.dtest)
+
+ def repr_failure(self, excinfo):
+ import doctest
+ if excinfo.errisinstance((doctest.DocTestFailure,
+ doctest.UnexpectedException)):
+ doctestfailure = excinfo.value
+ example = doctestfailure.example
+ test = doctestfailure.test
+ filename = test.filename
+ if test.lineno is None:
+ lineno = None
+ else:
+ lineno = test.lineno + example.lineno + 1
+ message = excinfo.type.__name__
+ reprlocation = ReprFileLocation(filename, lineno, message)
+ checker = _get_checker()
+ report_choice = _get_report_choice(self.config.getoption("doctestreport"))
+ if lineno is not None:
+ lines = doctestfailure.test.docstring.splitlines(False)
+ # add line numbers to the left of the error message
+ lines = ["%03d %s" % (i + test.lineno + 1, x)
+ for (i, x) in enumerate(lines)]
+ # trim docstring error lines to 10
+ lines = lines[example.lineno - 9:example.lineno + 1]
+ else:
+ lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example']
+ indent = '>>>'
+ for line in example.source.splitlines():
+ lines.append('??? %s %s' % (indent, line))
+ indent = '...'
+ if excinfo.errisinstance(doctest.DocTestFailure):
+ lines += checker.output_difference(example,
+ doctestfailure.got, report_choice).split("\n")
+ else:
+ inner_excinfo = ExceptionInfo(excinfo.value.exc_info)
+ lines += ["UNEXPECTED EXCEPTION: %s" %
+ repr(inner_excinfo.value)]
+ lines += traceback.format_exception(*excinfo.value.exc_info)
+ return ReprFailDoctest(reprlocation, lines)
+ else:
+ return super(DoctestItem, self).repr_failure(excinfo)
+
+ def reportinfo(self):
+ return self.fspath, None, "[doctest] %s" % self.name
+
+
+def _get_flag_lookup():
+ import doctest
+ return dict(DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1,
+ DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE,
+ NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE,
+ ELLIPSIS=doctest.ELLIPSIS,
+ IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL,
+ COMPARISON_FLAGS=doctest.COMPARISON_FLAGS,
+ ALLOW_UNICODE=_get_allow_unicode_flag(),
+ ALLOW_BYTES=_get_allow_bytes_flag(),
+ )
+
+
+def get_optionflags(parent):
+ optionflags_str = parent.config.getini("doctest_optionflags")
+ flag_lookup_table = _get_flag_lookup()
+ flag_acc = 0
+ for flag in optionflags_str:
+ flag_acc |= flag_lookup_table[flag]
+ return flag_acc
+
+
+class DoctestTextfile(pytest.Module):
+ obj = None
+
+ def collect(self):
+ import doctest
+
+ # inspired by doctest.testfile; ideally we would use it directly,
+ # but it doesn't support passing a custom checker
+ text = self.fspath.read()
+ filename = str(self.fspath)
+ name = self.fspath.basename
+ globs = {'__name__': '__main__'}
+
+
+ optionflags = get_optionflags(self)
+ runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
+ checker=_get_checker())
+
+ parser = doctest.DocTestParser()
+ test = parser.get_doctest(text, globs, name, filename, 0)
+ if test.examples:
+ yield DoctestItem(test.name, self, runner, test)
+
+
+def _check_all_skipped(test):
+ """raises pytest.skip() if all examples in the given DocTest have the SKIP
+ option set.
+ """
+ import doctest
+ all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples)
+ if all_skipped:
+ pytest.skip('all tests skipped by +SKIP option')
+
+
+class DoctestModule(pytest.Module):
+ def collect(self):
+ import doctest
+ if self.fspath.basename == "conftest.py":
+ module = self.config.pluginmanager._importconftest(self.fspath)
+ else:
+ try:
+ module = self.fspath.pyimport()
+ except ImportError:
+ if self.config.getvalue('doctest_ignore_import_errors'):
+ pytest.skip('unable to import module %r' % self.fspath)
+ else:
+ raise
+ # uses internal doctest module parsing mechanism
+ finder = doctest.DocTestFinder()
+ optionflags = get_optionflags(self)
+ runner = doctest.DebugRunner(verbose=0, optionflags=optionflags,
+ checker=_get_checker())
+ for test in finder.find(module, module.__name__):
+ if test.examples: # skip empty doctests
+ yield DoctestItem(test.name, self, runner, test)
+
+
+def _setup_fixtures(doctest_item):
+ """
+    Used by DoctestTextfile and DoctestItem to set up fixture information.
+ """
+ def func():
+ pass
+
+ doctest_item.funcargs = {}
+ fm = doctest_item.session._fixturemanager
+ doctest_item._fixtureinfo = fm.getfixtureinfo(node=doctest_item, func=func,
+ cls=None, funcargs=False)
+ fixture_request = FixtureRequest(doctest_item)
+ fixture_request._fillfixtures()
+ return fixture_request
+
+
+def _get_checker():
+ """
+    Returns a doctest.OutputChecker subclass that takes into account the
+ ALLOW_UNICODE option to ignore u'' prefixes in strings and ALLOW_BYTES
+ to strip b'' prefixes.
+ Useful when the same doctest should run in Python 2 and Python 3.
+
+ An inner class is used to avoid importing "doctest" at the module
+ level.
+ """
+ if hasattr(_get_checker, 'LiteralsOutputChecker'):
+ return _get_checker.LiteralsOutputChecker()
+
+ import doctest
+ import re
+
+ class LiteralsOutputChecker(doctest.OutputChecker):
+ """
+ Copied from doctest_nose_plugin.py from the nltk project:
+ https://github.com/nltk/nltk
+
+ Further extended to also support byte literals.
+ """
+
+ _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE)
+ _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE)
+
+ def check_output(self, want, got, optionflags):
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ if res:
+ return True
+
+ allow_unicode = optionflags & _get_allow_unicode_flag()
+ allow_bytes = optionflags & _get_allow_bytes_flag()
+ if not allow_unicode and not allow_bytes:
+ return False
+
+ else: # pragma: no cover
+ def remove_prefixes(regex, txt):
+ return re.sub(regex, r'\1\2', txt)
+
+ if allow_unicode:
+ want = remove_prefixes(self._unicode_literal_re, want)
+ got = remove_prefixes(self._unicode_literal_re, got)
+ if allow_bytes:
+ want = remove_prefixes(self._bytes_literal_re, want)
+ got = remove_prefixes(self._bytes_literal_re, got)
+ res = doctest.OutputChecker.check_output(self, want, got,
+ optionflags)
+ return res
+
+ _get_checker.LiteralsOutputChecker = LiteralsOutputChecker
+ return _get_checker.LiteralsOutputChecker()
+
+
+def _get_allow_unicode_flag():
+ """
+ Registers and returns the ALLOW_UNICODE flag.
+ """
+ import doctest
+ return doctest.register_optionflag('ALLOW_UNICODE')
+
+
+def _get_allow_bytes_flag():
+ """
+ Registers and returns the ALLOW_BYTES flag.
+ """
+ import doctest
+ return doctest.register_optionflag('ALLOW_BYTES')
+
+
+def _get_report_choice(key):
+ """
+    This function returns the actual `doctest` module flag value; we want to do this as late as possible to avoid
+ importing `doctest` and all its dependencies when parsing options, as it adds overhead and breaks tests.
+ """
+ import doctest
+
+ return {
+ DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF,
+ DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF,
+ DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF,
+ DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE,
+ DOCTEST_REPORT_CHOICE_NONE: 0,
+ }[key]
+
+@pytest.fixture(scope='session')
+def doctest_namespace():
+ """
+ Inject names into the doctest namespace.
+ """
+ return dict()
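+
+
+# Editor's note: illustrative sketch, not part of upstream pytest.  Projects
+# usually populate the session-scoped ``doctest_namespace`` fixture above from
+# their own conftest.py; ``my_helpers`` is a hypothetical module::
+#
+#     import my_helpers
+#     import pytest
+#
+#     @pytest.fixture(autouse=True)
+#     def _add_helpers(doctest_namespace):
+#         doctest_namespace['helpers'] = my_helpers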
diff --git a/lib/spack/external/_pytest/fixtures.py b/lib/spack/external/_pytest/fixtures.py
new file mode 100644
index 0000000000..28bcd4d8d7
--- /dev/null
+++ b/lib/spack/external/_pytest/fixtures.py
@@ -0,0 +1,1134 @@
+import sys
+
+from py._code.code import FormattedExcinfo
+
+import py
+import pytest
+import warnings
+
+import inspect
+import _pytest
+from _pytest._code.code import TerminalRepr
+from _pytest.compat import (
+ NOTSET, exc_clear, _format_args,
+ getfslineno, get_real_func,
+ is_generator, isclass, getimfunc,
+ getlocation, getfuncargnames,
+)
+
+def pytest_sessionstart(session):
+ session._fixturemanager = FixtureManager(session)
+
+
+scopename2class = {}
+
+
+scope2props = dict(session=())
+scope2props["module"] = ("fspath", "module")
+scope2props["class"] = scope2props["module"] + ("cls",)
+scope2props["instance"] = scope2props["class"] + ("instance", )
+scope2props["function"] = scope2props["instance"] + ("function", "keywords")
+
+def scopeproperty(name=None, doc=None):
+ def decoratescope(func):
+ scopename = name or func.__name__
+
+ def provide(self):
+ if func.__name__ in scope2props[self.scope]:
+ return func(self)
+ raise AttributeError("%s not available in %s-scoped context" % (
+ scopename, self.scope))
+
+ return property(provide, None, None, func.__doc__)
+ return decoratescope
+
+
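+# Editor's note: illustrative comment, not part of upstream pytest.  The
+# scope2props table above decides which attributes a request object exposes;
+# accessing one outside its scope raises AttributeError via scopeproperty()::
+#
+#     @pytest.fixture(scope='session')
+#     def broken(request):
+#         return request.module   # "module not available in session-scoped context"
+
+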
+def pytest_namespace():
+ scopename2class.update({
+ 'class': pytest.Class,
+ 'module': pytest.Module,
+ 'function': pytest.Item,
+ })
+ return {
+ 'fixture': fixture,
+ 'yield_fixture': yield_fixture,
+ 'collect': {'_fillfuncargs': fillfixtures}
+ }
+
+
+def get_scope_node(node, scope):
+ cls = scopename2class.get(scope)
+ if cls is None:
+ if scope == "session":
+ return node.session
+ raise ValueError("unknown scope")
+ return node.getparent(cls)
+
+
+def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager):
+    # this function will transform all collected calls to a function
+ # if they use direct funcargs (i.e. direct parametrization)
+ # because we want later test execution to be able to rely on
+ # an existing FixtureDef structure for all arguments.
+ # XXX we can probably avoid this algorithm if we modify CallSpec2
+ # to directly care for creating the fixturedefs within its methods.
+ if not metafunc._calls[0].funcargs:
+ return # this function call does not have direct parametrization
+ # collect funcargs of all callspecs into a list of values
+ arg2params = {}
+ arg2scope = {}
+ for callspec in metafunc._calls:
+ for argname, argvalue in callspec.funcargs.items():
+ assert argname not in callspec.params
+ callspec.params[argname] = argvalue
+ arg2params_list = arg2params.setdefault(argname, [])
+ callspec.indices[argname] = len(arg2params_list)
+ arg2params_list.append(argvalue)
+ if argname not in arg2scope:
+ scopenum = callspec._arg2scopenum.get(argname,
+ scopenum_function)
+ arg2scope[argname] = scopes[scopenum]
+ callspec.funcargs.clear()
+
+ # register artificial FixtureDef's so that later at test execution
+ # time we can rely on a proper FixtureDef to exist for fixture setup.
+ arg2fixturedefs = metafunc._arg2fixturedefs
+ for argname, valuelist in arg2params.items():
+ # if we have a scope that is higher than function we need
+        # to make sure we only ever create a corresponding fixturedef on
+ # a per-scope basis. We thus store and cache the fixturedef on the
+ # node related to the scope.
+ scope = arg2scope[argname]
+ node = None
+ if scope != "function":
+ node = get_scope_node(collector, scope)
+ if node is None:
+ assert scope == "class" and isinstance(collector, pytest.Module)
+ # use module-level collector for class-scope (for now)
+ node = collector
+ if node and argname in node._name2pseudofixturedef:
+ arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]]
+ else:
+ fixturedef = FixtureDef(fixturemanager, '', argname,
+ get_direct_param_fixture_func,
+ arg2scope[argname],
+ valuelist, False, False)
+ arg2fixturedefs[argname] = [fixturedef]
+ if node is not None:
+ node._name2pseudofixturedef[argname] = fixturedef
+
+
+
+def getfixturemarker(obj):
+    """ return the fixture marker, or None if it doesn't exist or accessing
+    it raised an exception. """
+ try:
+ return getattr(obj, "_pytestfixturefunction", None)
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ # some objects raise errors like request (from flask import request)
+ # we don't expect them to be fixture functions
+ return None
+
+
+
+def get_parametrized_fixture_keys(item, scopenum):
+ """ return list of keys for all parametrized arguments which match
+ the specified scope. """
+ assert scopenum < scopenum_function # function
+ try:
+ cs = item.callspec
+ except AttributeError:
+ pass
+ else:
+        # cs.indices.items() is in random order of argnames, but
+ # then again different functions (items) can change order of
+ # arguments so it doesn't matter much probably
+ for argname, param_index in cs.indices.items():
+ if cs._arg2scopenum[argname] != scopenum:
+ continue
+ if scopenum == 0: # session
+ key = (argname, param_index)
+ elif scopenum == 1: # module
+ key = (argname, param_index, item.fspath)
+ elif scopenum == 2: # class
+ key = (argname, param_index, item.fspath, item.cls)
+ yield key
+
+
+# algorithm for sorting on a per-parametrized resource setup basis
+# it is called for scopenum==0 (session) first and performs sorting
+# down to the lower scopes such as to minimize number of "high scope"
+# setups and teardowns
+
+def reorder_items(items):
+ argkeys_cache = {}
+ for scopenum in range(0, scopenum_function):
+ argkeys_cache[scopenum] = d = {}
+ for item in items:
+ keys = set(get_parametrized_fixture_keys(item, scopenum))
+ if keys:
+ d[item] = keys
+ return reorder_items_atscope(items, set(), argkeys_cache, 0)
+
+def reorder_items_atscope(items, ignore, argkeys_cache, scopenum):
+ if scopenum >= scopenum_function or len(items) < 3:
+ return items
+ items_done = []
+ while 1:
+ items_before, items_same, items_other, newignore = \
+ slice_items(items, ignore, argkeys_cache[scopenum])
+ items_before = reorder_items_atscope(
+            items_before, ignore, argkeys_cache, scopenum + 1)
+ if items_same is None:
+ # nothing to reorder in this scope
+ assert items_other is None
+ return items_done + items_before
+ items_done.extend(items_before)
+ items = items_same + items_other
+ ignore = newignore
+
+
+def slice_items(items, ignore, scoped_argkeys_cache):
+ # we pick the first item which uses a fixture instance in the
+ # requested scope and which we haven't seen yet. We slice the input
+ # items list into a list of items_nomatch, items_same and
+ # items_other
+ if scoped_argkeys_cache: # do we need to do work at all?
+ it = iter(items)
+ # first find a slicing key
+ for i, item in enumerate(it):
+ argkeys = scoped_argkeys_cache.get(item)
+ if argkeys is not None:
+ argkeys = argkeys.difference(ignore)
+ if argkeys: # found a slicing key
+ slicing_argkey = argkeys.pop()
+ items_before = items[:i]
+ items_same = [item]
+ items_other = []
+ # now slice the remainder of the list
+ for item in it:
+ argkeys = scoped_argkeys_cache.get(item)
+ if argkeys and slicing_argkey in argkeys and \
+ slicing_argkey not in ignore:
+ items_same.append(item)
+ else:
+ items_other.append(item)
+ newignore = ignore.copy()
+ newignore.add(slicing_argkey)
+ return (items_before, items_same, items_other, newignore)
+ return items, None, None, None
+
+
+
+class FuncargnamesCompatAttr:
+ """ helper class so that Metafunc, Function and FixtureRequest
+ don't need to each define the "funcargnames" compatibility attribute.
+ """
+ @property
+ def funcargnames(self):
+ """ alias attribute for ``fixturenames`` for pre-2.3 compatibility"""
+ return self.fixturenames
+
+
+def fillfixtures(function):
+ """ fill missing funcargs for a test function. """
+ try:
+ request = function._request
+ except AttributeError:
+ # XXX this special code path is only expected to execute
+ # with the oejskit plugin. It uses classes with funcargs
+ # and we thus have to work a bit to allow this.
+ fm = function.session._fixturemanager
+ fi = fm.getfixtureinfo(function.parent, function.obj, None)
+ function._fixtureinfo = fi
+ request = function._request = FixtureRequest(function)
+ request._fillfixtures()
+ # prune out funcargs for jstests
+ newfuncargs = {}
+ for name in fi.argnames:
+ newfuncargs[name] = function.funcargs[name]
+ function.funcargs = newfuncargs
+ else:
+ request._fillfixtures()
+
+
+
+def get_direct_param_fixture_func(request):
+ return request.param
+
+class FuncFixtureInfo:
+ def __init__(self, argnames, names_closure, name2fixturedefs):
+ self.argnames = argnames
+ self.names_closure = names_closure
+ self.name2fixturedefs = name2fixturedefs
+
+
+class FixtureRequest(FuncargnamesCompatAttr):
+ """ A request for a fixture from a test or fixture function.
+
+ A request object gives access to the requesting test context
+ and has an optional ``param`` attribute in case
+ the fixture is parametrized indirectly.
+ """
+
+ def __init__(self, pyfuncitem):
+ self._pyfuncitem = pyfuncitem
+ #: fixture for which this request is being performed
+ self.fixturename = None
+ #: Scope string, one of "function", "class", "module", "session"
+ self.scope = "function"
+ self._fixture_values = {} # argname -> fixture value
+ self._fixture_defs = {} # argname -> FixtureDef
+ fixtureinfo = pyfuncitem._fixtureinfo
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy()
+ self._arg2index = {}
+ self._fixturemanager = pyfuncitem.session._fixturemanager
+
+ @property
+ def fixturenames(self):
+ # backward incompatible note: now a readonly property
+ return list(self._pyfuncitem._fixtureinfo.names_closure)
+
+ @property
+ def node(self):
+ """ underlying collection node (depends on current request scope)"""
+ return self._getscopeitem(self.scope)
+
+
+ def _getnextfixturedef(self, argname):
+ fixturedefs = self._arg2fixturedefs.get(argname, None)
+ if fixturedefs is None:
+ # we arrive here because of a dynamic call to
+ # getfixturevalue(argname) which was naturally
+ # not known at parsing/collection time
+ parentid = self._pyfuncitem.parent.nodeid
+ fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid)
+ self._arg2fixturedefs[argname] = fixturedefs
+ # fixturedefs list is immutable so we maintain a decreasing index
+ index = self._arg2index.get(argname, 0) - 1
+ if fixturedefs is None or (-index > len(fixturedefs)):
+ raise FixtureLookupError(argname, self)
+ self._arg2index[argname] = index
+ return fixturedefs[index]
+
+ @property
+ def config(self):
+ """ the pytest config object associated with this request. """
+ return self._pyfuncitem.config
+
+
+ @scopeproperty()
+ def function(self):
+ """ test function object if the request has a per-function scope. """
+ return self._pyfuncitem.obj
+
+ @scopeproperty("class")
+ def cls(self):
+ """ class (can be None) where the test function was collected. """
+ clscol = self._pyfuncitem.getparent(pytest.Class)
+ if clscol:
+ return clscol.obj
+
+ @property
+ def instance(self):
+ """ instance (can be None) on which test function was collected. """
+ # unittest support hack, see _pytest.unittest.TestCaseFunction
+ try:
+ return self._pyfuncitem._testcase
+ except AttributeError:
+ function = getattr(self, "function", None)
+ if function is not None:
+ return py.builtin._getimself(function)
+
+ @scopeproperty()
+ def module(self):
+ """ python module object where the test function was collected. """
+ return self._pyfuncitem.getparent(pytest.Module).obj
+
+ @scopeproperty()
+ def fspath(self):
+ """ the file system path of the test module which collected this test. """
+ return self._pyfuncitem.fspath
+
+ @property
+ def keywords(self):
+ """ keywords/markers dictionary for the underlying node. """
+ return self.node.keywords
+
+ @property
+ def session(self):
+ """ pytest session object. """
+ return self._pyfuncitem.session
+
+ def addfinalizer(self, finalizer):
+ """ add finalizer/teardown function to be called after the
+ last test within the requesting test context finished
+ execution. """
+ # XXX usually this method is shadowed by fixturedef specific ones
+ self._addfinalizer(finalizer, scope=self.scope)
+
+ def _addfinalizer(self, finalizer, scope):
+ colitem = self._getscopeitem(scope)
+ self._pyfuncitem.session._setupstate.addfinalizer(
+ finalizer=finalizer, colitem=colitem)
+
+ def applymarker(self, marker):
+ """ Apply a marker to a single test function invocation.
+ This method is useful if you don't want to have a keyword/marker
+ on all function invocations.
+
+ :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object
+ created by a call to ``pytest.mark.NAME(...)``.
+ """
+ try:
+ self.node.keywords[marker.markname] = marker
+ except AttributeError:
+ raise ValueError(marker)
+
+ def raiseerror(self, msg):
+ """ raise a FixtureLookupError with the given message. """
+ raise self._fixturemanager.FixtureLookupError(None, self, msg)
+
+ def _fillfixtures(self):
+ item = self._pyfuncitem
+ fixturenames = getattr(item, "fixturenames", self.fixturenames)
+ for argname in fixturenames:
+ if argname not in item.funcargs:
+ item.funcargs[argname] = self.getfixturevalue(argname)
+
+ def cached_setup(self, setup, teardown=None, scope="module", extrakey=None):
+ """ (deprecated) Return a testing resource managed by ``setup`` &
+ ``teardown`` calls. ``scope`` and ``extrakey`` determine when the
+ ``teardown`` function will be called so that subsequent calls to
+ ``setup`` would recreate the resource. With pytest-2.3 you often
+ do not need ``cached_setup()`` as you can directly declare a scope
+ on a fixture function and register a finalizer through
+ ``request.addfinalizer()``.
+
+ :arg teardown: function receiving a previously setup resource.
+ :arg setup: a no-argument function creating a resource.
+ :arg scope: a string value out of ``function``, ``class``, ``module``
+ or ``session`` indicating the caching lifecycle of the resource.
+ :arg extrakey: added to internal caching key of (funcargname, scope).
+ """
+ if not hasattr(self.config, '_setupcache'):
+ self.config._setupcache = {} # XXX weakref?
+ cachekey = (self.fixturename, self._getscopeitem(scope), extrakey)
+ cache = self.config._setupcache
+ try:
+ val = cache[cachekey]
+ except KeyError:
+ self._check_scope(self.fixturename, self.scope, scope)
+ val = setup()
+ cache[cachekey] = val
+ if teardown is not None:
+ def finalizer():
+ del cache[cachekey]
+ teardown(val)
+ self._addfinalizer(finalizer, scope=scope)
+ return val
+
+ def getfixturevalue(self, argname):
+ """ Dynamically run a named fixture function.
+
+ Declaring fixtures via function argument is recommended where possible.
+ But if you can only decide whether to use another fixture at test
+ setup time, you may use this function to retrieve it inside a fixture
+ or test function body.
+ """
+ return self._get_active_fixturedef(argname).cached_result[0]
+
+ def getfuncargvalue(self, argname):
+ """ Deprecated, use getfixturevalue. """
+ from _pytest import deprecated
+ warnings.warn(
+ deprecated.GETFUNCARGVALUE,
+ DeprecationWarning)
+ return self.getfixturevalue(argname)
+
+ def _get_active_fixturedef(self, argname):
+ try:
+ return self._fixture_defs[argname]
+ except KeyError:
+ try:
+ fixturedef = self._getnextfixturedef(argname)
+ except FixtureLookupError:
+ if argname == "request":
+ class PseudoFixtureDef:
+ cached_result = (self, [0], None)
+ scope = "function"
+ return PseudoFixtureDef
+ raise
+ # remove indent to prevent the python3 exception
+ # from leaking into the call
+ result = self._getfixturevalue(fixturedef)
+ self._fixture_values[argname] = result
+ self._fixture_defs[argname] = fixturedef
+ return fixturedef
+
+ def _get_fixturestack(self):
+ current = self
+ l = []
+ while 1:
+ fixturedef = getattr(current, "_fixturedef", None)
+ if fixturedef is None:
+ l.reverse()
+ return l
+ l.append(fixturedef)
+ current = current._parent_request
+
+ def _getfixturevalue(self, fixturedef):
+ # prepare a subrequest object before calling fixture function
+ # (latter managed by fixturedef)
+ argname = fixturedef.argname
+ funcitem = self._pyfuncitem
+ scope = fixturedef.scope
+ try:
+ param = funcitem.callspec.getparam(argname)
+ except (AttributeError, ValueError):
+ param = NOTSET
+ param_index = 0
+ if fixturedef.params is not None:
+ frame = inspect.stack()[3]
+ frameinfo = inspect.getframeinfo(frame[0])
+ source_path = frameinfo.filename
+ source_lineno = frameinfo.lineno
+ source_path = py.path.local(source_path)
+ if source_path.relto(funcitem.config.rootdir):
+ source_path = source_path.relto(funcitem.config.rootdir)
+ msg = (
+ "The requested fixture has no parameter defined for the "
+ "current test.\n\nRequested fixture '{0}' defined in:\n{1}"
+ "\n\nRequested here:\n{2}:{3}".format(
+ fixturedef.argname,
+ getlocation(fixturedef.func, funcitem.config.rootdir),
+ source_path,
+ source_lineno,
+ )
+ )
+ pytest.fail(msg)
+ else:
+ # indices might not be set if old-style metafunc.addcall() was used
+ param_index = funcitem.callspec.indices.get(argname, 0)
+ # if a parametrize invocation set a scope it will override
+ # the static scope defined with the fixture function
+ paramscopenum = funcitem.callspec._arg2scopenum.get(argname)
+ if paramscopenum is not None:
+ scope = scopes[paramscopenum]
+
+ subrequest = SubRequest(self, scope, param, param_index, fixturedef)
+
+ # check if a higher-level scoped fixture accesses a lower level one
+ subrequest._check_scope(argname, self.scope, scope)
+
+ # clear sys.exc_info before invoking the fixture (python bug?)
+ # if it's not explicitly cleared it will leak into the call
+ exc_clear()
+ try:
+ # call the fixture function
+ val = fixturedef.execute(request=subrequest)
+ finally:
+ # if fixture function failed it might have registered finalizers
+ self.session._setupstate.addfinalizer(fixturedef.finish,
+ subrequest.node)
+ return val
+
+ def _check_scope(self, argname, invoking_scope, requested_scope):
+ if argname == "request":
+ return
+ if scopemismatch(invoking_scope, requested_scope):
+ # try to report something helpful
+ lines = self._factorytraceback()
+ pytest.fail("ScopeMismatch: You tried to access the %r scoped "
+ "fixture %r with a %r scoped request object, "
+ "involved factories\n%s" %(
+ (requested_scope, argname, invoking_scope, "\n".join(lines))),
+ pytrace=False)
+
+ def _factorytraceback(self):
+ lines = []
+ for fixturedef in self._get_fixturestack():
+ factory = fixturedef.func
+ fs, lineno = getfslineno(factory)
+ p = self._pyfuncitem.session.fspath.bestrelpath(fs)
+ args = _format_args(factory)
+ lines.append("%s:%d: def %s%s" %(
+ p, lineno, factory.__name__, args))
+ return lines
+
+ def _getscopeitem(self, scope):
+ if scope == "function":
+ # this might also be a non-function Item despite its attribute name
+ return self._pyfuncitem
+ node = get_scope_node(self._pyfuncitem, scope)
+ if node is None and scope == "class":
+ # fallback to function item itself
+ node = self._pyfuncitem
+ assert node
+ return node
+
+ def __repr__(self):
+ return "<FixtureRequest for %r>" %(self.node)
+
+
+class SubRequest(FixtureRequest):
+ """ a sub request for handling getting a fixture from a
+ test function/fixture. """
+ def __init__(self, request, scope, param, param_index, fixturedef):
+ self._parent_request = request
+ self.fixturename = fixturedef.argname
+ if param is not NOTSET:
+ self.param = param
+ self.param_index = param_index
+ self.scope = scope
+ self._fixturedef = fixturedef
+ self.addfinalizer = fixturedef.addfinalizer
+ self._pyfuncitem = request._pyfuncitem
+ self._fixture_values = request._fixture_values
+ self._fixture_defs = request._fixture_defs
+ self._arg2fixturedefs = request._arg2fixturedefs
+ self._arg2index = request._arg2index
+ self._fixturemanager = request._fixturemanager
+
+ def __repr__(self):
+ return "<SubRequest %r for %r>" % (self.fixturename, self._pyfuncitem)
+
+
+class ScopeMismatchError(Exception):
+ """ A fixture function tries to use a different fixture function which
+ which has a lower scope (e.g. a Session one calls a function one)
+ """
+
+
+scopes = "session module class function".split()
+scopenum_function = scopes.index("function")
+
+
+def scopemismatch(currentscope, newscope):
+ return scopes.index(newscope) > scopes.index(currentscope)
+
+
+def scope2index(scope, descr, where=None):
+ """Look up the index of ``scope`` and raise a descriptive value error
+ if not defined.
+ """
+ try:
+ return scopes.index(scope)
+ except ValueError:
+ raise ValueError(
+ "{0} {1}has an unsupported scope value '{2}'".format(
+ descr, 'from {0} '.format(where) if where else '',
+ scope)
+ )
+
+
+class FixtureLookupError(LookupError):
+ """ could not return a requested Fixture (missing or invalid). """
+ def __init__(self, argname, request, msg=None):
+ self.argname = argname
+ self.request = request
+ self.fixturestack = request._get_fixturestack()
+ self.msg = msg
+
+ def formatrepr(self):
+ tblines = []
+ addline = tblines.append
+ stack = [self.request._pyfuncitem.obj]
+ stack.extend(map(lambda x: x.func, self.fixturestack))
+ msg = self.msg
+ if msg is not None:
+ # the last fixture raised an error, let's present
+ # it at the requesting side
+ stack = stack[:-1]
+ for function in stack:
+ fspath, lineno = getfslineno(function)
+ try:
+ lines, _ = inspect.getsourcelines(get_real_func(function))
+ except (IOError, IndexError, TypeError):
+ error_msg = "file %s, line %s: source code not available"
+ addline(error_msg % (fspath, lineno+1))
+ else:
+ addline("file %s, line %s" % (fspath, lineno+1))
+ for i, line in enumerate(lines):
+ line = line.rstrip()
+ addline(" " + line)
+ if line.lstrip().startswith('def'):
+ break
+
+ if msg is None:
+ fm = self.request._fixturemanager
+ available = []
+ parentid = self.request._pyfuncitem.parent.nodeid
+ for name, fixturedefs in fm._arg2fixturedefs.items():
+ faclist = list(fm._matchfactories(fixturedefs, parentid))
+ if faclist and name not in available:
+ available.append(name)
+ msg = "fixture %r not found" % (self.argname,)
+ msg += "\n available fixtures: %s" %(", ".join(sorted(available)),)
+ msg += "\n use 'pytest --fixtures [testpath]' for help on them."
+
+ return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname)
+
+
+class FixtureLookupErrorRepr(TerminalRepr):
+ def __init__(self, filename, firstlineno, tblines, errorstring, argname):
+ self.tblines = tblines
+ self.errorstring = errorstring
+ self.filename = filename
+ self.firstlineno = firstlineno
+ self.argname = argname
+
+ def toterminal(self, tw):
+ # tw.line("FixtureLookupError: %s" %(self.argname), red=True)
+ for tbline in self.tblines:
+ tw.line(tbline.rstrip())
+ lines = self.errorstring.split("\n")
+ if lines:
+ tw.line('{0} {1}'.format(FormattedExcinfo.fail_marker,
+ lines[0].strip()), red=True)
+ for line in lines[1:]:
+ tw.line('{0} {1}'.format(FormattedExcinfo.flow_marker,
+ line.strip()), red=True)
+ tw.line()
+ tw.line("%s:%d" % (self.filename, self.firstlineno+1))
+
+
+def fail_fixturefunc(fixturefunc, msg):
+ fs, lineno = getfslineno(fixturefunc)
+ location = "%s:%s" % (fs, lineno+1)
+ source = _pytest._code.Source(fixturefunc)
+ pytest.fail(msg + ":\n\n" + str(source.indent()) + "\n" + location,
+ pytrace=False)
+
+def call_fixture_func(fixturefunc, request, kwargs):
+ yieldctx = is_generator(fixturefunc)
+ if yieldctx:
+ it = fixturefunc(**kwargs)
+ res = next(it)
+
+ def teardown():
+ try:
+ next(it)
+ except StopIteration:
+ pass
+ else:
+ fail_fixturefunc(fixturefunc,
+ "yield_fixture function has more than one 'yield'")
+
+ request.addfinalizer(teardown)
+ else:
+ res = fixturefunc(**kwargs)
+ return res
+
+
+class FixtureDef:
+ """ A container for a factory definition. """
+ def __init__(self, fixturemanager, baseid, argname, func, scope, params,
+ unittest=False, ids=None):
+ self._fixturemanager = fixturemanager
+ self.baseid = baseid or ''
+ self.has_location = baseid is not None
+ self.func = func
+ self.argname = argname
+ self.scope = scope
+ self.scopenum = scope2index(
+ scope or "function",
+ descr='fixture {0}'.format(func.__name__),
+ where=baseid
+ )
+ self.params = params
+ startindex = unittest and 1 or None
+ self.argnames = getfuncargnames(func, startindex=startindex)
+ self.unittest = unittest
+ self.ids = ids
+ self._finalizer = []
+
+ def addfinalizer(self, finalizer):
+ self._finalizer.append(finalizer)
+
+ def finish(self):
+ try:
+ while self._finalizer:
+ func = self._finalizer.pop()
+ func()
+ finally:
+ ihook = self._fixturemanager.session.ihook
+ ihook.pytest_fixture_post_finalizer(fixturedef=self)
+ # even if finalization fails, we invalidate
+ # the cached fixture value
+ if hasattr(self, "cached_result"):
+ del self.cached_result
+
+ def execute(self, request):
+ # get required arguments and register our own finish()
+ # with their finalization
+ for argname in self.argnames:
+ fixturedef = request._get_active_fixturedef(argname)
+ if argname != "request":
+ fixturedef.addfinalizer(self.finish)
+
+ my_cache_key = request.param_index
+ cached_result = getattr(self, "cached_result", None)
+ if cached_result is not None:
+ result, cache_key, err = cached_result
+ if my_cache_key == cache_key:
+ if err is not None:
+ py.builtin._reraise(*err)
+ else:
+ return result
+ # we have a previous but differently parametrized fixture instance
+ # so we need to tear it down before creating a new one
+ self.finish()
+ assert not hasattr(self, "cached_result")
+
+ ihook = self._fixturemanager.session.ihook
+ return ihook.pytest_fixture_setup(fixturedef=self, request=request)
+
+ def __repr__(self):
+ return ("<FixtureDef name=%r scope=%r baseid=%r >" %
+ (self.argname, self.scope, self.baseid))
+
+def pytest_fixture_setup(fixturedef, request):
+ """ Execution of fixture setup. """
+ kwargs = {}
+ for argname in fixturedef.argnames:
+ fixdef = request._get_active_fixturedef(argname)
+ result, arg_cache_key, exc = fixdef.cached_result
+ request._check_scope(argname, request.scope, fixdef.scope)
+ kwargs[argname] = result
+
+ fixturefunc = fixturedef.func
+ if fixturedef.unittest:
+ if request.instance is not None:
+ # bind the unbound method to the TestCase instance
+ fixturefunc = fixturedef.func.__get__(request.instance)
+ else:
+ # the fixture function needs to be bound to the actual
+ # request.instance so that code working with "fixturedef" behaves
+ # as expected.
+ if request.instance is not None:
+ fixturefunc = getimfunc(fixturedef.func)
+ if fixturefunc != fixturedef.func:
+ fixturefunc = fixturefunc.__get__(request.instance)
+ my_cache_key = request.param_index
+ try:
+ result = call_fixture_func(fixturefunc, request, kwargs)
+ except Exception:
+ fixturedef.cached_result = (None, my_cache_key, sys.exc_info())
+ raise
+ fixturedef.cached_result = (result, my_cache_key, None)
+ return result
+
+
+class FixtureFunctionMarker:
+ def __init__(self, scope, params, autouse=False, ids=None, name=None):
+ self.scope = scope
+ self.params = params
+ self.autouse = autouse
+ self.ids = ids
+ self.name = name
+
+ def __call__(self, function):
+ if isclass(function):
+ raise ValueError(
+ "class fixtures not supported (may be in the future)")
+ function._pytestfixturefunction = self
+ return function
+
+
+
+def fixture(scope="function", params=None, autouse=False, ids=None, name=None):
+ """ (return a) decorator to mark a fixture factory function.
+
+ This decorator can be used (with or without parameters) to define
+ a fixture function. The name of the fixture function can later be
+ referenced to cause its invocation ahead of running tests: test
+ modules or classes can use the pytest.mark.usefixtures(fixturename)
+ marker. Test functions can directly use fixture names as input
+ arguments in which case the fixture instance returned from the fixture
+ function will be injected.
+
+ :arg scope: the scope for which this fixture is shared, one of
+ "function" (default), "class", "module" or "session".
+
+ :arg params: an optional list of parameters which will cause multiple
+ invocations of the fixture function and all of the tests
+ using it.
+
+ :arg autouse: if True, the fixture func is activated for all tests that
+ can see it. If False (the default) then an explicit
+ reference is needed to activate the fixture.
+
+ :arg ids: list of string ids each corresponding to the params
+ so that they are part of the test id. If no ids are provided
+ they will be generated automatically from the params.
+
+ :arg name: the name of the fixture. This defaults to the name of the
+ decorated function. If a fixture is used in the same module in
+ which it is defined, the function name of the fixture will be
+ shadowed by the function arg that requests the fixture; one way
+ to resolve this is to name the decorated function
+ ``fixture_<fixturename>`` and then use
+ ``@pytest.fixture(name='<fixturename>')``.
+
+ Fixtures can optionally provide their values to test functions using a ``yield`` statement,
+ instead of ``return``. In this case, the code block after the ``yield`` statement is executed
+ as teardown code regardless of the test outcome. A fixture function must yield exactly once.
+ """
+ if callable(scope) and params is None and autouse == False:
+ # direct decoration
+ return FixtureFunctionMarker(
+ "function", params, autouse, name=name)(scope)
+ if params is not None and not isinstance(params, (list, tuple)):
+ params = list(params)
+ return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
+
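
[Editor's note] A minimal usage sketch of the decorator documented above (not part of the patch; names are hypothetical), showing both the ``yield`` teardown style and the ``name=`` workaround for fixtures defined and used in the same module:

    # test_fixture_usage.py -- hypothetical illustration
    import pytest

    @pytest.fixture(name="tmp_cfg")
    def fixture_tmp_cfg():
        cfg = {"debug": True}
        yield cfg        # value handed to the test
        cfg.clear()      # teardown runs after the test, pass or fail

    def test_debug_enabled(tmp_cfg):
        assert tmp_cfg["debug"]
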
+
+def yield_fixture(scope="function", params=None, autouse=False, ids=None, name=None):
+ """ (return a) decorator to mark a yield-fixture factory function.
+
+ .. deprecated:: 3.0
+ Use :py:func:`pytest.fixture` directly instead.
+ """
+ if callable(scope) and params is None and not autouse:
+ # direct decoration
+ return FixtureFunctionMarker(
+ "function", params, autouse, ids=ids, name=name)(scope)
+ else:
+ return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name)
+
+
+defaultfuncargprefixmarker = fixture()
+
+
+@fixture(scope="session")
+def pytestconfig(request):
+ """ the pytest config object with access to command line opts."""
+ return request.config
+
+
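
[Editor's note] Usage sketch for the session-scoped ``pytestconfig`` fixture defined above (not part of the patch; the test name is hypothetical):

    # test_config_access.py -- hypothetical illustration
    def test_verbosity_option(pytestconfig):
        # parsed command line options are available via getoption()
        assert pytestconfig.getoption("verbose") >= 0
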
+class FixtureManager:
+ """
+ pytest fixture definitions and information are stored and managed
+ by this class.
+
+ During collection fm.parsefactories() is called multiple times to parse
+ fixture function definitions into FixtureDef objects and internal
+ data structures.
+
+ During collection of test functions, metafunc-mechanics instantiate
+ a FuncFixtureInfo object which is cached per node/func-name.
+ This FuncFixtureInfo object is later retrieved by Function nodes
+ which themselves offer a fixturenames attribute.
+
+ The FuncFixtureInfo object holds information about fixtures and FixtureDefs
+ relevant for a particular function. An initial list of fixtures is
+ assembled like this:
+
+ - ini-defined usefixtures
+ - autouse-marked fixtures along the collection chain up from the function
+ - usefixtures markers at module/class/function level
+ - test function funcargs
+
+ Subsequently the funcfixtureinfo.fixturenames attribute is computed
+ as the closure of the fixtures needed to set up the initial fixtures,
+ i.e. fixtures needed by fixture functions themselves are appended
+ to the fixturenames list.
+
+ During the test-setup phase all fixturenames are instantiated,
+ retrieved by a lookup of their FuncFixtureInfo.
+ """
+
+ _argprefix = "pytest_funcarg__"
+ FixtureLookupError = FixtureLookupError
+ FixtureLookupErrorRepr = FixtureLookupErrorRepr
+
+ def __init__(self, session):
+ self.session = session
+ self.config = session.config
+ self._arg2fixturedefs = {}
+ self._holderobjseen = set()
+ self._arg2finish = {}
+ self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))]
+ session.config.pluginmanager.register(self, "funcmanage")
+
+
+ def getfixtureinfo(self, node, func, cls, funcargs=True):
+ if funcargs and not hasattr(node, "nofuncargs"):
+ if cls is not None:
+ startindex = 1
+ else:
+ startindex = None
+ argnames = getfuncargnames(func, startindex)
+ else:
+ argnames = ()
+ usefixtures = getattr(func, "usefixtures", None)
+ initialnames = argnames
+ if usefixtures is not None:
+ initialnames = usefixtures.args + initialnames
+ fm = node.session._fixturemanager
+ names_closure, arg2fixturedefs = fm.getfixtureclosure(initialnames,
+ node)
+ return FuncFixtureInfo(argnames, names_closure, arg2fixturedefs)
+
+ def pytest_plugin_registered(self, plugin):
+ nodeid = None
+ try:
+ p = py.path.local(plugin.__file__)
+ except AttributeError:
+ pass
+ else:
+ # construct the base nodeid which is later used to check
+ # what fixtures are visible for particular tests (as denoted
+ # by their test id)
+ if p.basename.startswith("conftest.py"):
+ nodeid = p.dirpath().relto(self.config.rootdir)
+ if p.sep != "/":
+ nodeid = nodeid.replace(p.sep, "/")
+ self.parsefactories(plugin, nodeid)
+
+ def _getautousenames(self, nodeid):
+ """ return a tuple of fixture names to be used. """
+ autousenames = []
+ for baseid, basenames in self._nodeid_and_autousenames:
+ if nodeid.startswith(baseid):
+ if baseid:
+ i = len(baseid)
+ nextchar = nodeid[i:i+1]
+ if nextchar and nextchar not in ":/":
+ continue
+ autousenames.extend(basenames)
+ # make sure autousenames are sorted by scope, scopenum 0 is session
+ autousenames.sort(
+ key=lambda x: self._arg2fixturedefs[x][-1].scopenum)
+ return autousenames
+
+ def getfixtureclosure(self, fixturenames, parentnode):
+ # collect the closure of all fixtures, starting with the given
+ # fixturenames as the initial set. As we have to visit all
+ # factory definitions anyway, we also return an arg2fixturedefs
+ # mapping so that the caller can reuse it and does not have
+ # to re-discover fixturedefs again for each fixturename
+ # (discovering matching fixtures for a given name/node is expensive)
+
+ parentid = parentnode.nodeid
+ fixturenames_closure = self._getautousenames(parentid)
+
+ def merge(otherlist):
+ for arg in otherlist:
+ if arg not in fixturenames_closure:
+ fixturenames_closure.append(arg)
+
+ merge(fixturenames)
+ arg2fixturedefs = {}
+ lastlen = -1
+ while lastlen != len(fixturenames_closure):
+ lastlen = len(fixturenames_closure)
+ for argname in fixturenames_closure:
+ if argname in arg2fixturedefs:
+ continue
+ fixturedefs = self.getfixturedefs(argname, parentid)
+ if fixturedefs:
+ arg2fixturedefs[argname] = fixturedefs
+ merge(fixturedefs[-1].argnames)
+ return fixturenames_closure, arg2fixturedefs
+
+ def pytest_generate_tests(self, metafunc):
+ for argname in metafunc.fixturenames:
+ faclist = metafunc._arg2fixturedefs.get(argname)
+ if faclist:
+ fixturedef = faclist[-1]
+ if fixturedef.params is not None:
+ func_params = getattr(getattr(metafunc.function, 'parametrize', None), 'args', [[None]])
+ # skip directly parametrized arguments
+ argnames = func_params[0]
+ if not isinstance(argnames, (tuple, list)):
+ argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+ if argname not in func_params and argname not in argnames:
+ metafunc.parametrize(argname, fixturedef.params,
+ indirect=True, scope=fixturedef.scope,
+ ids=fixturedef.ids)
+ else:
+ continue # will raise FixtureLookupError at setup time
+
+ def pytest_collection_modifyitems(self, items):
+ # separate parametrized setups
+ items[:] = reorder_items(items)
+
+ def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False):
+ if nodeid is not NOTSET:
+ holderobj = node_or_obj
+ else:
+ holderobj = node_or_obj.obj
+ nodeid = node_or_obj.nodeid
+ if holderobj in self._holderobjseen:
+ return
+ self._holderobjseen.add(holderobj)
+ autousenames = []
+ for name in dir(holderobj):
+ obj = getattr(holderobj, name, None)
+ # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style)
+ # or are "@pytest.fixture" marked
+ marker = getfixturemarker(obj)
+ if marker is None:
+ if not name.startswith(self._argprefix):
+ continue
+ if not callable(obj):
+ continue
+ marker = defaultfuncargprefixmarker
+ from _pytest import deprecated
+ self.config.warn('C1', deprecated.FUNCARG_PREFIX.format(name=name))
+ name = name[len(self._argprefix):]
+ elif not isinstance(marker, FixtureFunctionMarker):
+ # magic globals with __getattr__ might have got us a wrong
+ # fixture attribute
+ continue
+ else:
+ if marker.name:
+ name = marker.name
+ msg = 'fixtures cannot have "pytest_funcarg__" prefix ' \
+ 'and be decorated with @pytest.fixture:\n%s' % name
+ assert not name.startswith(self._argprefix), msg
+
+ fixture_def = FixtureDef(self, nodeid, name, obj,
+ marker.scope, marker.params,
+ unittest=unittest, ids=marker.ids)
+
+ faclist = self._arg2fixturedefs.setdefault(name, [])
+ if fixture_def.has_location:
+ faclist.append(fixture_def)
+ else:
+ # fixturedefs with no location are at the front
+ # so this inserts the current fixturedef after the
+ # existing fixturedefs from external plugins but
+ # before the fixturedefs provided in conftests.
+ i = len([f for f in faclist if not f.has_location])
+ faclist.insert(i, fixture_def)
+ if marker.autouse:
+ autousenames.append(name)
+
+ if autousenames:
+ self._nodeid_and_autousenames.append((nodeid or '', autousenames))
+
+ def getfixturedefs(self, argname, nodeid):
+ """
+ Gets a list of fixtures which are applicable to the given node id.
+
+ :param str argname: name of the fixture to search for
+ :param str nodeid: full node id of the requesting test.
+ :return: list[FixtureDef]
+ """
+ try:
+ fixturedefs = self._arg2fixturedefs[argname]
+ except KeyError:
+ return None
+ else:
+ return tuple(self._matchfactories(fixturedefs, nodeid))
+
+ def _matchfactories(self, fixturedefs, nodeid):
+ for fixturedef in fixturedefs:
+ if nodeid.startswith(fixturedef.baseid):
+ yield fixturedef
+
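
[Editor's note] The ``FixtureRequest.getfixturevalue`` API above allows choosing a fixture at setup time. A hedged sketch (not part of the patch; the fixture names and environment variable are hypothetical):

    # conftest.py -- hypothetical illustration
    import os
    import pytest

    @pytest.fixture
    def fast_backend():
        return "in-memory"

    @pytest.fixture
    def slow_backend():
        return "on-disk"

    @pytest.fixture
    def backend(request):
        # the concrete fixture is only known at setup time, so resolve
        # it dynamically instead of declaring it as an argument
        name = "slow_backend" if os.environ.get("USE_SLOW") else "fast_backend"
        return request.getfixturevalue(name)

    def test_backend(backend):
        assert backend in ("in-memory", "on-disk")
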
diff --git a/lib/spack/external/_pytest/freeze_support.py b/lib/spack/external/_pytest/freeze_support.py
new file mode 100644
index 0000000000..b27f59d74a
--- /dev/null
+++ b/lib/spack/external/_pytest/freeze_support.py
@@ -0,0 +1,45 @@
+"""
+Provides a function that reports all internal pytest modules,
+for use with freezing tools such as cx_Freeze.
+"""
+
+def pytest_namespace():
+ return {'freeze_includes': freeze_includes}
+
+
+def freeze_includes():
+ """
+ Returns a list of module names used by py.test that should be
+ included by cx_freeze.
+ """
+ import py
+ import _pytest
+ result = list(_iter_all_modules(py))
+ result += list(_iter_all_modules(_pytest))
+ return result
+
+
+def _iter_all_modules(package, prefix=''):
+ """
+ Iterates over the names of all modules that can be found in the given
+ package, recursively.
+ Example:
+ _iter_all_modules(_pytest) ->
+ ['_pytest.assertion.newinterpret',
+ '_pytest.capture',
+ '_pytest.core',
+ ...
+ ]
+ """
+ import os
+ import pkgutil
+ if type(package) is not str:
+ path, prefix = package.__path__[0], package.__name__ + '.'
+ else:
+ path = package
+ for _, name, is_package in pkgutil.iter_modules([path]):
+ if is_package:
+ for m in _iter_all_modules(os.path.join(path, name), prefix=name + '.'):
+ yield prefix + m
+ else:
+ yield prefix + name
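
[Editor's note] freeze_includes() above targets freezing tools. A sketch of how it would typically be consumed from a cx_Freeze setup script (not part of the patch; assumes cx_Freeze is installed, and the application name is hypothetical):

    # setup.py -- hypothetical illustration for cx_Freeze
    from cx_Freeze import setup, Executable
    import pytest

    setup(
        name="app_main",
        executables=[Executable("app_main.py")],
        # bundle pytest's internal modules so the frozen app can run tests
        options={"build_exe": {"includes": pytest.freeze_includes()}},
    )
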
diff --git a/lib/spack/external/_pytest/helpconfig.py b/lib/spack/external/_pytest/helpconfig.py
new file mode 100644
index 0000000000..6e66b11c48
--- /dev/null
+++ b/lib/spack/external/_pytest/helpconfig.py
@@ -0,0 +1,144 @@
+""" version info, help messages, tracing configuration. """
+import py
+import pytest
+import os, sys
+
+def pytest_addoption(parser):
+ group = parser.getgroup('debugconfig')
+ group.addoption('--version', action="store_true",
+ help="display pytest lib version and import information.")
+ group._addoption("-h", "--help", action="store_true", dest="help",
+ help="show help message and configuration info")
+ group._addoption('-p', action="append", dest="plugins", default = [],
+ metavar="name",
+ help="early-load given plugin (multi-allowed). "
+ "To avoid loading of plugins, use the `no:` prefix, e.g. "
+ "`no:doctest`.")
+ group.addoption('--traceconfig', '--trace-config',
+ action="store_true", default=False,
+ help="trace considerations of conftest.py files."),
+ group.addoption('--debug',
+ action="store_true", dest="debug", default=False,
+ help="store internal tracing debug information in 'pytestdebug.log'.")
+ group._addoption(
+ '-o', '--override-ini', nargs='*', dest="override_ini",
+ action="append",
+ help="override config option with option=value style, e.g. `-o xfail_strict=True`.")
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_cmdline_parse():
+ outcome = yield
+ config = outcome.get_result()
+ if config.option.debug:
+ path = os.path.abspath("pytestdebug.log")
+ debugfile = open(path, 'w')
+ debugfile.write("versions pytest-%s, py-%s, "
+ "python-%s\ncwd=%s\nargs=%s\n\n" %(
+ pytest.__version__, py.__version__,
+ ".".join(map(str, sys.version_info)),
+ os.getcwd(), config._origargs))
+ config.trace.root.setwriter(debugfile.write)
+ undo_tracing = config.pluginmanager.enable_tracing()
+ sys.stderr.write("writing pytestdebug information to %s\n" % path)
+
+ def unset_tracing():
+ debugfile.close()
+ sys.stderr.write("wrote pytestdebug information to %s\n" %
+ debugfile.name)
+ config.trace.root.setwriter(None)
+ undo_tracing()
+
+ config.add_cleanup(unset_tracing)
+
+def pytest_cmdline_main(config):
+ if config.option.version:
+ p = py.path.local(pytest.__file__)
+ sys.stderr.write("This is pytest version %s, imported from %s\n" %
+ (pytest.__version__, p))
+ plugininfo = getpluginversioninfo(config)
+ if plugininfo:
+ for line in plugininfo:
+ sys.stderr.write(line + "\n")
+ return 0
+ elif config.option.help:
+ config._do_configure()
+ showhelp(config)
+ config._ensure_unconfigure()
+ return 0
+
+def showhelp(config):
+ reporter = config.pluginmanager.get_plugin('terminalreporter')
+ tw = reporter._tw
+ tw.write(config._parser.optparser.format_help())
+ tw.line()
+ tw.line()
+ tw.line("[pytest] ini-options in the first "
+ "pytest.ini|tox.ini|setup.cfg file found:")
+ tw.line()
+
+ for name in config._parser._ininames:
+ help, type, default = config._parser._inidict[name]
+ if type is None:
+ type = "string"
+ spec = "%s (%s)" % (name, type)
+ line = " %-24s %s" %(spec, help)
+ tw.line(line[:tw.fullwidth])
+
+ tw.line()
+ tw.line("environment variables:")
+ vars = [
+ ("PYTEST_ADDOPTS", "extra command line options"),
+ ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"),
+ ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals")
+ ]
+ for name, help in vars:
+ tw.line(" %-24s %s" % (name, help))
+ tw.line()
+ tw.line()
+
+ tw.line("to see available markers type: pytest --markers")
+ tw.line("to see available fixtures type: pytest --fixtures")
+ tw.line("(shown according to specified file_or_dir or current dir "
+ "if not specified)")
+
+ for warningreport in reporter.stats.get('warnings', []):
+ tw.line("warning : " + warningreport.message, red=True)
+ return
+
+
+conftest_options = [
+ ('pytest_plugins', 'list of plugin names to load'),
+]
+
+def getpluginversioninfo(config):
+ lines = []
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+ lines.append("setuptools registered plugins:")
+ for plugin, dist in plugininfo:
+ loc = getattr(plugin, '__file__', repr(plugin))
+ content = "%s-%s at %s" % (dist.project_name, dist.version, loc)
+ lines.append(" " + content)
+ return lines
+
+def pytest_report_header(config):
+ lines = []
+ if config.option.debug or config.option.traceconfig:
+ lines.append("using: pytest-%s pylib-%s" %
+ (pytest.__version__, py.__version__))
+
+ verinfo = getpluginversioninfo(config)
+ if verinfo:
+ lines.extend(verinfo)
+
+ if config.option.traceconfig:
+ lines.append("active plugins:")
+ items = config.pluginmanager.list_name_plugin()
+ for name, plugin in items:
+ if hasattr(plugin, '__file__'):
+ r = plugin.__file__
+ else:
+ r = repr(plugin)
+ lines.append(" %-20s: %s" %(name, r))
+ return lines
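
[Editor's note] The ini options listed by showhelp() above come from parser.addini() registrations. A minimal sketch of registering and reading one (not part of the patch; the option and file names are hypothetical):

    # conftest.py -- hypothetical illustration
    def pytest_addoption(parser):
        # appears under "[pytest] ini-options" in `pytest --help`
        parser.addini("app_timeout", "default timeout for app tests",
                      default="30")

    # test_timeout.py
    def test_timeout_value(pytestconfig):
        # reads back the registered ini value; `pytest -o app_timeout=60`
        # would override it via --override-ini
        assert pytestconfig.getini("app_timeout") == "30"
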
diff --git a/lib/spack/external/_pytest/hookspec.py b/lib/spack/external/_pytest/hookspec.py
new file mode 100644
index 0000000000..b5f51eccf5
--- /dev/null
+++ b/lib/spack/external/_pytest/hookspec.py
@@ -0,0 +1,314 @@
+""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
+
+from _pytest._pluggy import HookspecMarker
+
+hookspec = HookspecMarker("pytest")
+
+# -------------------------------------------------------------------------
+# Initialization hooks called for every plugin
+# -------------------------------------------------------------------------
+
+@hookspec(historic=True)
+def pytest_addhooks(pluginmanager):
+ """called at plugin registration time to allow adding new hooks via a call to
+ pluginmanager.add_hookspecs(module_or_class, prefix)."""
+
+
+@hookspec(historic=True)
+def pytest_namespace():
+ """return dict of name->object to be made globally available in
+ the pytest namespace. This hook is called at plugin registration
+ time.
+ """
+
+@hookspec(historic=True)
+def pytest_plugin_registered(plugin, manager):
+ """ a new pytest plugin got registered. """
+
+
+@hookspec(historic=True)
+def pytest_addoption(parser):
+ """register argparse-style options and ini-style config values,
+ called once at the beginning of a test run.
+
+ .. note::
+
+ This function should be implemented only in plugins or ``conftest.py``
+ files situated at the tests root directory due to how pytest
+ :ref:`discovers plugins during startup <pluginorder>`.
+
+ :arg parser: To add command line options, call
+ :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`.
+ To add ini-file values call :py:func:`parser.addini(...)
+ <_pytest.config.Parser.addini>`.
+
+ Options can later be accessed through the
+ :py:class:`config <_pytest.config.Config>` object, respectively:
+
+ - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to
+ retrieve the value of a command line option.
+
+ - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve
+ a value read from an ini-style file.
+
+ The config object is passed around on many internal objects via the ``.config``
+ attribute or can be retrieved as the ``pytestconfig`` fixture or accessed
+ via (deprecated) ``pytest.config``.
+ """
+
+@hookspec(historic=True)
+def pytest_configure(config):
+ """ called after command line options have been parsed
+ and all plugins and initial conftest files been loaded.
+ This hook is called for every plugin.
+ """
+
+# -------------------------------------------------------------------------
+# Bootstrapping hooks called for plugins registered early enough:
+# internal and 3rd party plugins as well as directly
+# discoverable conftest.py local plugins.
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_cmdline_parse(pluginmanager, args):
+ """return initialized config object, parsing the specified args. """
+
+def pytest_cmdline_preparse(config, args):
+ """(deprecated) modify command line arguments before option parsing. """
+
+@hookspec(firstresult=True)
+def pytest_cmdline_main(config):
+ """ called for performing the main command line action. The default
+ implementation will invoke the configure hooks and runtest_mainloop. """
+
+def pytest_load_initial_conftests(early_config, parser, args):
+ """ implements the loading of initial conftest files ahead
+ of command line option parsing. """
+
+
+# -------------------------------------------------------------------------
+# collection hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_collection(session):
+ """ perform the collection protocol for the given session. """
+
+def pytest_collection_modifyitems(session, config, items):
+ """ called after collection has been performed, may filter or re-order
+ the items in-place."""
+
+def pytest_collection_finish(session):
+ """ called after collection has been performed and modified. """
+
+@hookspec(firstresult=True)
+def pytest_ignore_collect(path, config):
+ """ return True to prevent considering this path for collection.
+ This hook is consulted for all files and directories prior to calling
+ more specific hooks.
+ """
+
+@hookspec(firstresult=True)
+def pytest_collect_directory(path, parent):
+ """ called before traversing a directory for collection files. """
+
+def pytest_collect_file(path, parent):
+ """ return collection Node or None for the given path. Any new node
+ needs to have the specified ``parent`` as a parent."""
+
+# logging hooks for collection
+def pytest_collectstart(collector):
+ """ collector starts collecting. """
+
+def pytest_itemcollected(item):
+ """ we just collected a test item. """
+
+def pytest_collectreport(report):
+ """ collector finished collecting. """
+
+def pytest_deselected(items):
+ """ called for test items deselected by keyword. """
+
+@hookspec(firstresult=True)
+def pytest_make_collect_report(collector):
+ """ perform ``collector.collect()`` and return a CollectReport. """
+
+# -------------------------------------------------------------------------
+# Python test function related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makemodule(path, parent):
+ """ return a Module collector or None for the given path.
+ This hook will be called for each matching test module path.
+ The pytest_collect_file hook needs to be used if you want to
+ create test modules for files that do not match as a test module.
+ """
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+ """ return custom item/collector for a python object in a module, or None. """
+
+@hookspec(firstresult=True)
+def pytest_pyfunc_call(pyfuncitem):
+ """ call underlying test function. """
+
+def pytest_generate_tests(metafunc):
+ """ generate (multiple) parametrized calls to a test function."""
+
+@hookspec(firstresult=True)
+def pytest_make_parametrize_id(config, val):
+ """Return a user-friendly string representation of the given ``val`` that will be used
+ by @pytest.mark.parametrize calls. Return None if the hook doesn't know about ``val``.
+ """
+
+# -------------------------------------------------------------------------
+# generic runtest related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_runtestloop(session):
+ """ called for performing the main runtest loop
+ (after collection finished). """
+
+def pytest_itemstart(item, node):
+ """ (deprecated, use pytest_runtest_logstart). """
+
+@hookspec(firstresult=True)
+def pytest_runtest_protocol(item, nextitem):
+ """ implements the runtest_setup/call/teardown protocol for
+ the given test item, including capturing exceptions and calling
+ reporting hooks.
+
+ :arg item: test item for which the runtest protocol is performed.
+
+ :arg nextitem: the scheduled-to-be-next test item (or None if this
+ is the end my friend). This argument is passed on to
+ :py:func:`pytest_runtest_teardown`.
+
+ :return boolean: True if no further hook implementations should be invoked.
+ """
+
+def pytest_runtest_logstart(nodeid, location):
+ """ signal the start of running a single test item. """
+
+def pytest_runtest_setup(item):
+ """ called before ``pytest_runtest_call(item)``. """
+
+def pytest_runtest_call(item):
+ """ called to execute the test ``item``. """
+
+def pytest_runtest_teardown(item, nextitem):
+ """ called after ``pytest_runtest_call``.
+
+ :arg nextitem: the scheduled-to-be-next test item (None if no further
+ test item is scheduled). This argument can be used to
+ perform exact teardowns, i.e. calling just enough finalizers
+ so that nextitem only needs to call setup-functions.
+ """
+
+@hookspec(firstresult=True)
+def pytest_runtest_makereport(item, call):
+ """ return a :py:class:`_pytest.runner.TestReport` object
+ for the given :py:class:`pytest.Item` and
+ :py:class:`_pytest.runner.CallInfo`.
+ """
+
+def pytest_runtest_logreport(report):
+ """ process a test setup/call/teardown report relating to
+ the respective phase of executing a test. """
+
+# -------------------------------------------------------------------------
+# Fixture related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_fixture_setup(fixturedef, request):
+ """ performs fixture setup execution. """
+
+def pytest_fixture_post_finalizer(fixturedef):
+ """ called after fixture teardown, but before the cache is cleared so
+ the fixture result cache ``fixturedef.cached_result`` can
+ still be accessed."""
+
+# -------------------------------------------------------------------------
+# test session related hooks
+# -------------------------------------------------------------------------
+
+def pytest_sessionstart(session):
+ """ before session.main() is called. """
+
+def pytest_sessionfinish(session, exitstatus):
+ """ whole test run finishes. """
+
+def pytest_unconfigure(config):
+ """ called before test process is exited. """
+
+
+# -------------------------------------------------------------------------
+# hooks for customising the assert methods
+# -------------------------------------------------------------------------
+
+def pytest_assertrepr_compare(config, op, left, right):
+ """return explanation for comparisons in failing assert expressions.
+
+ Return None for no custom explanation, otherwise return a list
+ of strings. The strings will be joined by newlines but any newlines
+ *in* a string will be escaped. Note that all but the first line will
+ be indented slightly, the intention is for the first line to be a summary.
+ """
+
+# -------------------------------------------------------------------------
+# hooks for influencing reporting (invoked from _pytest_terminal)
+# -------------------------------------------------------------------------
+
+def pytest_report_header(config, startdir):
+ """ return a string to be displayed as header info for terminal reporting."""
+
+@hookspec(firstresult=True)
+def pytest_report_teststatus(report):
+ """ return result-category, shortletter and verbose word for reporting."""
+
+def pytest_terminal_summary(terminalreporter, exitstatus):
+ """ add additional section in terminal summary reporting. """
+
+
+@hookspec(historic=True)
+def pytest_logwarning(message, code, nodeid, fslocation):
+ """ process a warning specified by a message, a code string,
+ a nodeid and fslocation (both of which may be None
+ if the warning is not tied to a partilar node/location)."""
+
+# -------------------------------------------------------------------------
+# doctest hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_doctest_prepare_content(content):
+ """ return processed content for a given doctest"""
+
+# -------------------------------------------------------------------------
+# error handling and internal debugging hooks
+# -------------------------------------------------------------------------
+
+def pytest_internalerror(excrepr, excinfo):
+ """ called for internal errors. """
+
+def pytest_keyboard_interrupt(excinfo):
+ """ called for keyboard interrupt. """
+
+def pytest_exception_interact(node, call, report):
+ """called when an exception was raised which can potentially be
+ interactively handled.
+
+ This hook is only called if an exception was raised
+ that is not an internal exception like ``skip.Exception``.
+ """
+
+def pytest_enter_pdb(config):
+ """ called upon pdb.set_trace(), can be used by plugins to take special
+ action just before the python debugger enters in interactive mode.
+
+ :arg config: pytest config object
+ :type config: _pytest.config.Config
+ """
diff --git a/lib/spack/external/_pytest/junitxml.py b/lib/spack/external/_pytest/junitxml.py
new file mode 100644
index 0000000000..317382e637
--- /dev/null
+++ b/lib/spack/external/_pytest/junitxml.py
@@ -0,0 +1,413 @@
+"""
+ report test results in JUnit-XML format,
+ for use with Jenkins and build integration servers.
+
+
+Based on initial code from Ross Lawley.
+"""
+# Output conforms to https://github.com/jenkinsci/xunit-plugin/blob/master/
+# src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+
+import functools
+import py
+import os
+import re
+import sys
+import time
+import pytest
+from _pytest.config import filename_arg
+
+# Python 2.X and 3.X compatibility
+if sys.version_info[0] < 3:
+ from codecs import open
+else:
+ unichr = chr
+ unicode = str
+ long = int
+
+
+class Junit(py.xml.Namespace):
+ pass
+
+
+# We need to get the subset of the invalid unicode ranges according to
+# XML 1.0 which are valid in this python build. Hence we calculate
+# this dynamically instead of hardcoding it. The spec range of valid
+# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
+# | [#x10000-#x10FFFF]
+_legal_chars = (0x09, 0x0A, 0x0d)
+_legal_ranges = (
+ (0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF),
+)
+_legal_xml_re = [
+ unicode("%s-%s") % (unichr(low), unichr(high))
+ for (low, high) in _legal_ranges if low < sys.maxunicode
+]
+_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
+illegal_xml_re = re.compile(unicode('[^%s]') % unicode('').join(_legal_xml_re))
+del _legal_chars
+del _legal_ranges
+del _legal_xml_re
+
+_py_ext_re = re.compile(r"\.py$")
+
+
+def bin_xml_escape(arg):
+ def repl(matchobj):
+ i = ord(matchobj.group())
+ if i <= 0xFF:
+ return unicode('#x%02X') % i
+ else:
+ return unicode('#x%04X') % i
+
+ return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg)))
+
+
+class _NodeReporter(object):
+ def __init__(self, nodeid, xml):
+
+ self.id = nodeid
+ self.xml = xml
+ self.add_stats = self.xml.add_stats
+ self.duration = 0
+ self.properties = []
+ self.nodes = []
+ self.testcase = None
+ self.attrs = {}
+
+ def append(self, node):
+ self.xml.add_stats(type(node).__name__)
+ self.nodes.append(node)
+
+ def add_property(self, name, value):
+ self.properties.append((str(name), bin_xml_escape(value)))
+
+ def make_properties_node(self):
+ """Return a Junit node containing custom properties, if any.
+ """
+ if self.properties:
+ return Junit.properties([
+ Junit.property(name=name, value=value)
+ for name, value in self.properties
+ ])
+ return ''
+
+ def record_testreport(self, testreport):
+ assert not self.testcase
+ names = mangle_test_address(testreport.nodeid)
+ classnames = names[:-1]
+ if self.xml.prefix:
+ classnames.insert(0, self.xml.prefix)
+ attrs = {
+ "classname": ".".join(classnames),
+ "name": bin_xml_escape(names[-1]),
+ "file": testreport.location[0],
+ }
+ if testreport.location[1] is not None:
+ attrs["line"] = testreport.location[1]
+ self.attrs = attrs
+
+ def to_xml(self):
+ testcase = Junit.testcase(time=self.duration, **self.attrs)
+ testcase.append(self.make_properties_node())
+ for node in self.nodes:
+ testcase.append(node)
+ return testcase
+
+ def _add_simple(self, kind, message, data=None):
+ data = bin_xml_escape(data)
+ node = kind(data, message=message)
+ self.append(node)
+
+ def _write_captured_output(self, report):
+ for capname in ('out', 'err'):
+ content = getattr(report, 'capstd' + capname)
+ if content:
+ tag = getattr(Junit, 'system-' + capname)
+ self.append(tag(bin_xml_escape(content)))
+
+ def append_pass(self, report):
+ self.add_stats('passed')
+ self._write_captured_output(report)
+
+ def append_failure(self, report):
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ if hasattr(report, "wasxfail"):
+ self._add_simple(
+ Junit.skipped,
+ "xfail-marked test passes unexpectedly")
+ else:
+ if hasattr(report.longrepr, "reprcrash"):
+ message = report.longrepr.reprcrash.message
+ elif isinstance(report.longrepr, (unicode, str)):
+ message = report.longrepr
+ else:
+ message = str(report.longrepr)
+ message = bin_xml_escape(message)
+ fail = Junit.failure(message=message)
+ fail.append(bin_xml_escape(report.longrepr))
+ self.append(fail)
+ self._write_captured_output(report)
+
+ def append_collect_error(self, report):
+ # msg = str(report.longrepr.reprtraceback.extraline)
+ self.append(Junit.error(bin_xml_escape(report.longrepr),
+ message="collection failure"))
+
+ def append_collect_skipped(self, report):
+ self._add_simple(
+ Junit.skipped, "collection skipped", report.longrepr)
+
+ def append_error(self, report):
+ if getattr(report, 'when', None) == 'teardown':
+ msg = "test teardown failure"
+ else:
+ msg = "test setup failure"
+ self._add_simple(
+ Junit.error, msg, report.longrepr)
+ self._write_captured_output(report)
+
+ def append_skipped(self, report):
+ if hasattr(report, "wasxfail"):
+ self._add_simple(
+ Junit.skipped, "expected test failure", report.wasxfail
+ )
+ else:
+ filename, lineno, skipreason = report.longrepr
+ if skipreason.startswith("Skipped: "):
+ skipreason = bin_xml_escape(skipreason[9:])
+ self.append(
+ Junit.skipped("%s:%s: %s" % (filename, lineno, skipreason),
+ type="pytest.skip",
+ message=skipreason))
+ self._write_captured_output(report)
+
+ def finalize(self):
+ data = self.to_xml().unicode(indent=0)
+ self.__dict__.clear()
+ self.to_xml = lambda: py.xml.raw(data)
+
+
+@pytest.fixture
+def record_xml_property(request):
+ """Add extra xml properties to the tag for the calling test.
+ The fixture is callable with ``(name, value)``, with value being automatically
+ xml-encoded.
+ """
+ request.node.warn(
+ code='C3',
+ message='record_xml_property is an experimental feature',
+ )
+ xml = getattr(request.config, "_xml", None)
+ if xml is not None:
+ node_reporter = xml.node_reporter(request.node.nodeid)
+ return node_reporter.add_property
+ else:
+ def add_property_noop(name, value):
+ pass
+
+ return add_property_noop
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting")
+ group.addoption(
+ '--junitxml', '--junit-xml',
+ action="store",
+ dest="xmlpath",
+ metavar="path",
+ type=functools.partial(filename_arg, optname="--junitxml"),
+ default=None,
+ help="create junit-xml style report file at given path.")
+ group.addoption(
+ '--junitprefix', '--junit-prefix',
+ action="store",
+ metavar="str",
+ default=None,
+ help="prepend prefix to classnames in junit-xml output")
+
+
+def pytest_configure(config):
+ xmlpath = config.option.xmlpath
+ # prevent opening xmllog on slave nodes (xdist)
+ if xmlpath and not hasattr(config, 'slaveinput'):
+ config._xml = LogXML(xmlpath, config.option.junitprefix)
+ config.pluginmanager.register(config._xml)
+
+
+def pytest_unconfigure(config):
+ xml = getattr(config, '_xml', None)
+ if xml:
+ del config._xml
+ config.pluginmanager.unregister(xml)
+
+
+def mangle_test_address(address):
+ path, possible_open_bracket, params = address.partition('[')
+ names = path.split("::")
+ try:
+ names.remove('()')
+ except ValueError:
+ pass
+ # convert file path to dotted path
+ names[0] = names[0].replace("/", '.')
+ names[0] = _py_ext_re.sub("", names[0])
+ # put any params back
+ names[-1] += possible_open_bracket + params
+ return names
+
+
+class LogXML(object):
+ def __init__(self, logfile, prefix):
+ logfile = os.path.expanduser(os.path.expandvars(logfile))
+ self.logfile = os.path.normpath(os.path.abspath(logfile))
+ self.prefix = prefix
+ self.stats = dict.fromkeys([
+ 'error',
+ 'passed',
+ 'failure',
+ 'skipped',
+ ], 0)
+ self.node_reporters = {} # nodeid -> _NodeReporter
+ self.node_reporters_ordered = []
+ self.global_properties = []
+
+ def finalize(self, report):
+ nodeid = getattr(report, 'nodeid', report)
+ # local hack to handle xdist report order
+ slavenode = getattr(report, 'node', None)
+ reporter = self.node_reporters.pop((nodeid, slavenode))
+ if reporter is not None:
+ reporter.finalize()
+
+ def node_reporter(self, report):
+ nodeid = getattr(report, 'nodeid', report)
+ # local hack to handle xdist report order
+ slavenode = getattr(report, 'node', None)
+
+ key = nodeid, slavenode
+
+ if key in self.node_reporters:
+ # TODO: breaks for --dist=each
+ return self.node_reporters[key]
+
+ reporter = _NodeReporter(nodeid, self)
+
+ self.node_reporters[key] = reporter
+ self.node_reporters_ordered.append(reporter)
+
+ return reporter
+
+ def add_stats(self, key):
+ if key in self.stats:
+ self.stats[key] += 1
+
+ def _opentestcase(self, report):
+ reporter = self.node_reporter(report)
+ reporter.record_testreport(report)
+ return reporter
+
+ def pytest_runtest_logreport(self, report):
+ """handle a setup/call/teardown report, generating the appropriate
+ xml tags as necessary.
+
+ note: due to plugins like xdist, this hook may be called in interlaced
+ order with reports from other nodes. for example:
+
+ usual call order:
+ -> setup node1
+ -> call node1
+ -> teardown node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+
+ possible call order in xdist:
+ -> setup node1
+ -> call node1
+ -> setup node2
+ -> call node2
+ -> teardown node2
+ -> teardown node1
+ """
+ if report.passed:
+ if report.when == "call": # ignore setup/teardown
+ reporter = self._opentestcase(report)
+ reporter.append_pass(report)
+ elif report.failed:
+ reporter = self._opentestcase(report)
+ if report.when == "call":
+ reporter.append_failure(report)
+ else:
+ reporter.append_error(report)
+ elif report.skipped:
+ reporter = self._opentestcase(report)
+ reporter.append_skipped(report)
+ self.update_testcase_duration(report)
+ if report.when == "teardown":
+ self.finalize(report)
+
+ def update_testcase_duration(self, report):
+ """accumulates total duration for nodeid from given report and updates
+ the Junit.testcase with the new total if already created.
+ """
+ reporter = self.node_reporter(report)
+ reporter.duration += getattr(report, 'duration', 0.0)
+
+ def pytest_collectreport(self, report):
+ if not report.passed:
+ reporter = self._opentestcase(report)
+ if report.failed:
+ reporter.append_collect_error(report)
+ else:
+ reporter.append_collect_skipped(report)
+
+ def pytest_internalerror(self, excrepr):
+ reporter = self.node_reporter('internal')
+ reporter.attrs.update(classname="pytest", name='internal')
+ reporter._add_simple(Junit.error, 'internal error', excrepr)
+
+ def pytest_sessionstart(self):
+ self.suite_start_time = time.time()
+
+ def pytest_sessionfinish(self):
+ dirname = os.path.dirname(os.path.abspath(self.logfile))
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ logfile = open(self.logfile, 'w', encoding='utf-8')
+ suite_stop_time = time.time()
+ suite_time_delta = suite_stop_time - self.suite_start_time
+
+ numtests = self.stats['passed'] + self.stats['failure'] + self.stats['skipped'] + self.stats['error']
+
+ logfile.write('<?xml version="1.0" encoding="utf-8"?>')
+
+ logfile.write(Junit.testsuite(
+ self._get_global_properties_node(),
+ [x.to_xml() for x in self.node_reporters_ordered],
+ name="pytest",
+ errors=self.stats['error'],
+ failures=self.stats['failure'],
+ skips=self.stats['skipped'],
+ tests=numtests,
+ time="%.3f" % suite_time_delta, ).unicode(indent=0))
+ logfile.close()
+
+ def pytest_terminal_summary(self, terminalreporter):
+ terminalreporter.write_sep("-",
+ "generated xml file: %s" % (self.logfile))
+
+ def add_global_property(self, name, value):
+ self.global_properties.append((str(name), bin_xml_escape(value)))
+
+ def _get_global_properties_node(self):
+ """Return a Junit node containing custom properties, if any.
+ """
+ if self.global_properties:
+ return Junit.properties(
+ [
+ Junit.property(name=name, value=value)
+ for name, value in self.global_properties
+ ]
+ )
+ return ''
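For orientation, a minimal sketch of what mangle_test_address() above does to a test's nodeid before it is written out as classname/name attributes in the XML report; the nodeid is made up, and the behaviour of _py_ext_re (stripping a trailing ".py") is an assumption taken from context.

    # Sketch only, not part of the patch; the nodeid below is hypothetical.
    from _pytest.junitxml import mangle_test_address

    names = mangle_test_address("tests/test_spec.py::TestGroup::test_case[2]")
    # the file path becomes a dotted name, parametrization brackets are kept
    assert names == ["tests.test_spec", "TestGroup", "test_case[2]"]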
diff --git a/lib/spack/external/_pytest/main.py b/lib/spack/external/_pytest/main.py
new file mode 100644
index 0000000000..52876c12a4
--- /dev/null
+++ b/lib/spack/external/_pytest/main.py
@@ -0,0 +1,762 @@
+""" core implementation of testing process: init, session, runtest loop. """
+import functools
+import os
+import sys
+
+import _pytest
+import _pytest._code
+import py
+import pytest
+try:
+ from collections import MutableMapping as MappingMixin
+except ImportError:
+ from UserDict import DictMixin as MappingMixin
+
+from _pytest.config import directory_arg
+from _pytest.runner import collect_one_node
+
+tracebackcutdir = py.path.local(_pytest.__file__).dirpath()
+
+# exitcodes for the command line
+EXIT_OK = 0
+EXIT_TESTSFAILED = 1
+EXIT_INTERRUPTED = 2
+EXIT_INTERNALERROR = 3
+EXIT_USAGEERROR = 4
+EXIT_NOTESTSCOLLECTED = 5
+
+def pytest_addoption(parser):
+ parser.addini("norecursedirs", "directory patterns to avoid for recursion",
+ type="args", default=['.*', 'build', 'dist', 'CVS', '_darcs', '{arch}', '*.egg'])
+ parser.addini("testpaths", "directories to search for tests when no files or directories are given in the command line.",
+ type="args", default=[])
+ #parser.addini("dirpatterns",
+ # "patterns specifying possible locations of test files",
+ # type="linelist", default=["**/test_*.txt",
+ # "**/test_*.py", "**/*_test.py"]
+ #)
+ group = parser.getgroup("general", "running and selection options")
+ group._addoption('-x', '--exitfirst', action="store_const",
+ dest="maxfail", const=1,
+ help="exit instantly on first error or failed test."),
+ group._addoption('--maxfail', metavar="num",
+ action="store", type=int, dest="maxfail", default=0,
+ help="exit after first num failures or errors.")
+ group._addoption('--strict', action="store_true",
+ help="run pytest in strict mode, warnings become errors.")
+ group._addoption("-c", metavar="file", type=str, dest="inifilename",
+ help="load configuration from `file` instead of trying to locate one of the implicit configuration files.")
+ group._addoption("--continue-on-collection-errors", action="store_true",
+ default=False, dest="continue_on_collection_errors",
+ help="Force test execution even if collection errors occur.")
+
+ group = parser.getgroup("collect", "collection")
+ group.addoption('--collectonly', '--collect-only', action="store_true",
+ help="only collect tests, don't execute them."),
+ group.addoption('--pyargs', action="store_true",
+ help="try to interpret all arguments as python packages.")
+ group.addoption("--ignore", action="append", metavar="path",
+ help="ignore path during collection (multi-allowed).")
+ # when changing this to --conf-cut-dir, config.py Conftest.setinitial
+ # needs upgrading as well
+ group.addoption('--confcutdir', dest="confcutdir", default=None,
+ metavar="dir", type=functools.partial(directory_arg, optname="--confcutdir"),
+ help="only load conftest.py's relative to specified dir.")
+ group.addoption('--noconftest', action="store_true",
+ dest="noconftest", default=False,
+ help="Don't load any conftest.py files.")
+ group.addoption('--keepduplicates', '--keep-duplicates', action="store_true",
+ dest="keepduplicates", default=False,
+ help="Keep duplicate tests.")
+
+ group = parser.getgroup("debugconfig",
+ "test session debugging and configuration")
+ group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir",
+ help="base temporary directory for this test run.")
+
+
+def pytest_namespace():
+ collect = dict(Item=Item, Collector=Collector, File=File, Session=Session)
+ return dict(collect=collect)
+
+
+def pytest_configure(config):
+ pytest.config = config # compatibility
+
+
+def wrap_session(config, doit):
+ """Skeleton command line program"""
+ session = Session(config)
+ session.exitstatus = EXIT_OK
+ initstate = 0
+ try:
+ try:
+ config._do_configure()
+ initstate = 1
+ config.hook.pytest_sessionstart(session=session)
+ initstate = 2
+ session.exitstatus = doit(config, session) or 0
+ except pytest.UsageError:
+ raise
+ except KeyboardInterrupt:
+ excinfo = _pytest._code.ExceptionInfo()
+ if initstate < 2 and isinstance(
+ excinfo.value, pytest.exit.Exception):
+ sys.stderr.write('{0}: {1}\n'.format(
+ excinfo.typename, excinfo.value.msg))
+ config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
+ session.exitstatus = EXIT_INTERRUPTED
+ except:
+ excinfo = _pytest._code.ExceptionInfo()
+ config.notify_exception(excinfo, config.option)
+ session.exitstatus = EXIT_INTERNALERROR
+ if excinfo.errisinstance(SystemExit):
+ sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
+
+ finally:
+ excinfo = None # Explicitly break reference cycle.
+ session.startdir.chdir()
+ if initstate >= 2:
+ config.hook.pytest_sessionfinish(
+ session=session,
+ exitstatus=session.exitstatus)
+ config._ensure_unconfigure()
+ return session.exitstatus
+
+def pytest_cmdline_main(config):
+ return wrap_session(config, _main)
+
+def _main(config, session):
+ """ default command line protocol for initialization, session,
+ running tests and reporting. """
+ config.hook.pytest_collection(session=session)
+ config.hook.pytest_runtestloop(session=session)
+
+ if session.testsfailed:
+ return EXIT_TESTSFAILED
+ elif session.testscollected == 0:
+ return EXIT_NOTESTSCOLLECTED
+
+def pytest_collection(session):
+ return session.perform_collect()
+
+def pytest_runtestloop(session):
+ if (session.testsfailed and
+ not session.config.option.continue_on_collection_errors):
+ raise session.Interrupted(
+ "%d errors during collection" % session.testsfailed)
+
+ if session.config.option.collectonly:
+ return True
+
+ for i, item in enumerate(session.items):
+ nextitem = session.items[i+1] if i+1 < len(session.items) else None
+ item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
+ if session.shouldstop:
+ raise session.Interrupted(session.shouldstop)
+ return True
+
+def pytest_ignore_collect(path, config):
+ p = path.dirpath()
+ ignore_paths = config._getconftest_pathlist("collect_ignore", path=p)
+ ignore_paths = ignore_paths or []
+ excludeopt = config.getoption("ignore")
+ if excludeopt:
+ ignore_paths.extend([py.path.local(x) for x in excludeopt])
+
+ if path in ignore_paths:
+ return True
+
+ # Skip duplicate paths.
+ keepduplicates = config.getoption("keepduplicates")
+ duplicate_paths = config.pluginmanager._duplicatepaths
+ if not keepduplicates:
+ if path in duplicate_paths:
+ return True
+ else:
+ duplicate_paths.add(path)
+
+ return False
+
+
+class FSHookProxy:
+ def __init__(self, fspath, pm, remove_mods):
+ self.fspath = fspath
+ self.pm = pm
+ self.remove_mods = remove_mods
+
+ def __getattr__(self, name):
+ x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
+ self.__dict__[name] = x
+ return x
+
+def compatproperty(name):
+ def fget(self):
+ import warnings
+ warnings.warn("This usage is deprecated, please use pytest.{0} instead".format(name),
+ PendingDeprecationWarning, stacklevel=2)
+ return getattr(pytest, name)
+
+ return property(fget)
+
+class NodeKeywords(MappingMixin):
+ def __init__(self, node):
+ self.node = node
+ self.parent = node.parent
+ self._markers = {node.name: True}
+
+ def __getitem__(self, key):
+ try:
+ return self._markers[key]
+ except KeyError:
+ if self.parent is None:
+ raise
+ return self.parent.keywords[key]
+
+ def __setitem__(self, key, value):
+ self._markers[key] = value
+
+ def __delitem__(self, key):
+ raise ValueError("cannot delete key in keywords dict")
+
+ def __iter__(self):
+ seen = set(self._markers)
+ if self.parent is not None:
+ seen.update(self.parent.keywords)
+ return iter(seen)
+
+ def __len__(self):
+ return len(self.__iter__())
+
+ def keys(self):
+ return list(self)
+
+ def __repr__(self):
+ return "<NodeKeywords for node %s>" % (self.node, )
+
+
+class Node(object):
+ """ base class for Collector and Item the test collection tree.
+ Collector subclasses have children, Items are terminal nodes."""
+
+ def __init__(self, name, parent=None, config=None, session=None):
+ #: a unique name within the scope of the parent node
+ self.name = name
+
+ #: the parent collector node.
+ self.parent = parent
+
+ #: the pytest config object
+ self.config = config or parent.config
+
+ #: the session this node is part of
+ self.session = session or parent.session
+
+ #: filesystem path where this node was collected from (can be None)
+ self.fspath = getattr(parent, 'fspath', None)
+
+ #: keywords/markers collected from all scopes
+ self.keywords = NodeKeywords(self)
+
+ #: allow adding of extra keywords to use for matching
+ self.extra_keyword_matches = set()
+
+ # used for storing artificial fixturedefs for direct parametrization
+ self._name2pseudofixturedef = {}
+
+ @property
+ def ihook(self):
+ """ fspath sensitive hook proxy used to call pytest hooks"""
+ return self.session.gethookproxy(self.fspath)
+
+ Module = compatproperty("Module")
+ Class = compatproperty("Class")
+ Instance = compatproperty("Instance")
+ Function = compatproperty("Function")
+ File = compatproperty("File")
+ Item = compatproperty("Item")
+
+ def _getcustomclass(self, name):
+ cls = getattr(self, name)
+ if cls != getattr(pytest, name):
+ py.log._apiwarn("2.0", "use of node.%s is deprecated, "
+ "use pytest_pycollect_makeitem(...) to create custom "
+ "collection nodes" % name)
+ return cls
+
+ def __repr__(self):
+ return "<%s %r>" %(self.__class__.__name__,
+ getattr(self, 'name', None))
+
+ def warn(self, code, message):
+ """ generate a warning with the given code and message for this
+ item. """
+ assert isinstance(code, str)
+ fslocation = getattr(self, "location", None)
+ if fslocation is None:
+ fslocation = getattr(self, "fspath", None)
+ else:
+ fslocation = "%s:%s" % (fslocation[0], fslocation[1] + 1)
+
+ self.ihook.pytest_logwarning.call_historic(kwargs=dict(
+ code=code, message=message,
+ nodeid=self.nodeid, fslocation=fslocation))
+
+ # methods for ordering nodes
+ @property
+ def nodeid(self):
+ """ a ::-separated string denoting its collection tree address. """
+ try:
+ return self._nodeid
+ except AttributeError:
+ self._nodeid = x = self._makeid()
+ return x
+
+ def _makeid(self):
+ return self.parent.nodeid + "::" + self.name
+
+ def __hash__(self):
+ return hash(self.nodeid)
+
+ def setup(self):
+ pass
+
+ def teardown(self):
+ pass
+
+ def _memoizedcall(self, attrname, function):
+ exattrname = "_ex_" + attrname
+ failure = getattr(self, exattrname, None)
+ if failure is not None:
+ py.builtin._reraise(failure[0], failure[1], failure[2])
+ if hasattr(self, attrname):
+ return getattr(self, attrname)
+ try:
+ res = function()
+ except py.builtin._sysex:
+ raise
+ except:
+ failure = sys.exc_info()
+ setattr(self, exattrname, failure)
+ raise
+ setattr(self, attrname, res)
+ return res
+
+ def listchain(self):
+ """ return list of all parent collectors up to self,
+ starting from root of collection tree. """
+ chain = []
+ item = self
+ while item is not None:
+ chain.append(item)
+ item = item.parent
+ chain.reverse()
+ return chain
+
+ def add_marker(self, marker):
+ """ dynamically add a marker object to the node.
+
+ ``marker`` can be a string or pytest.mark.* instance.
+ """
+ from _pytest.mark import MarkDecorator
+ if isinstance(marker, py.builtin._basestring):
+ marker = MarkDecorator(marker)
+ elif not isinstance(marker, MarkDecorator):
+ raise ValueError("is not a string or pytest.mark.* Marker")
+ self.keywords[marker.name] = marker
+
+ def get_marker(self, name):
+ """ get a marker object from this node or None if
+ the node doesn't have a marker with that name. """
+ val = self.keywords.get(name, None)
+ if val is not None:
+ from _pytest.mark import MarkInfo, MarkDecorator
+ if isinstance(val, (MarkDecorator, MarkInfo)):
+ return val
+
+ def listextrakeywords(self):
+ """ Return a set of all extra keywords in self and any parents."""
+ extra_keywords = set()
+ item = self
+ for item in self.listchain():
+ extra_keywords.update(item.extra_keyword_matches)
+ return extra_keywords
+
+ def listnames(self):
+ return [x.name for x in self.listchain()]
+
+ def addfinalizer(self, fin):
+ """ register a function to be called when this node is finalized.
+
+ This method can only be called when this node is active
+ in a setup chain, for example during self.setup().
+ """
+ self.session._setupstate.addfinalizer(fin, self)
+
+ def getparent(self, cls):
+ """ get the next parent node (including ourself)
+ which is an instance of the given class"""
+ current = self
+ while current and not isinstance(current, cls):
+ current = current.parent
+ return current
+
+ def _prunetraceback(self, excinfo):
+ pass
+
+ def _repr_failure_py(self, excinfo, style=None):
+ fm = self.session._fixturemanager
+ if excinfo.errisinstance(fm.FixtureLookupError):
+ return excinfo.value.formatrepr()
+ tbfilter = True
+ if self.config.option.fulltrace:
+ style="long"
+ else:
+ tb = _pytest._code.Traceback([excinfo.traceback[-1]])
+ self._prunetraceback(excinfo)
+ if len(excinfo.traceback) == 0:
+ excinfo.traceback = tb
+ tbfilter = False # prunetraceback already does it
+ if style == "auto":
+ style = "long"
+ # XXX should excinfo.getrepr record all data and toterminal() process it?
+ if style is None:
+ if self.config.option.tbstyle == "short":
+ style = "short"
+ else:
+ style = "long"
+
+ try:
+ os.getcwd()
+ abspath = False
+ except OSError:
+ abspath = True
+
+ return excinfo.getrepr(funcargs=True, abspath=abspath,
+ showlocals=self.config.option.showlocals,
+ style=style, tbfilter=tbfilter)
+
+ repr_failure = _repr_failure_py
+
+class Collector(Node):
+ """ Collector instances create children through collect()
+ and thus iteratively build a tree.
+ """
+
+ class CollectError(Exception):
+ """ an error during collection, contains a custom message. """
+
+ def collect(self):
+ """ returns a list of children (items and collectors)
+ for this collection node.
+ """
+ raise NotImplementedError("abstract")
+
+ def repr_failure(self, excinfo):
+ """ represent a collection failure. """
+ if excinfo.errisinstance(self.CollectError):
+ exc = excinfo.value
+ return str(exc.args[0])
+ return self._repr_failure_py(excinfo, style="short")
+
+ def _memocollect(self):
+ """ internal helper method to cache results of calling collect(). """
+ return self._memoizedcall('_collected', lambda: list(self.collect()))
+
+ def _prunetraceback(self, excinfo):
+ if hasattr(self, 'fspath'):
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=self.fspath)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(excludepath=tracebackcutdir)
+ excinfo.traceback = ntraceback.filter()
+
+class FSCollector(Collector):
+ def __init__(self, fspath, parent=None, config=None, session=None):
+ fspath = py.path.local(fspath) # xxx only for test_resultlog.py?
+ name = fspath.basename
+ if parent is not None:
+ rel = fspath.relto(parent.fspath)
+ if rel:
+ name = rel
+ name = name.replace(os.sep, "/")
+ super(FSCollector, self).__init__(name, parent, config, session)
+ self.fspath = fspath
+
+ def _makeid(self):
+ relpath = self.fspath.relto(self.config.rootdir)
+ if os.sep != "/":
+ relpath = relpath.replace(os.sep, "/")
+ return relpath
+
+class File(FSCollector):
+ """ base class for collecting tests from a file. """
+
+class Item(Node):
+ """ a basic test invocation item. Note that for a single function
+ there might be multiple test invocation items.
+ """
+ nextitem = None
+
+ def __init__(self, name, parent=None, config=None, session=None):
+ super(Item, self).__init__(name, parent, config, session)
+ self._report_sections = []
+
+ def add_report_section(self, when, key, content):
+ if content:
+ self._report_sections.append((when, key, content))
+
+ def reportinfo(self):
+ return self.fspath, None, ""
+
+ @property
+ def location(self):
+ try:
+ return self._location
+ except AttributeError:
+ location = self.reportinfo()
+ # bestrelpath is a quite slow function
+ cache = self.config.__dict__.setdefault("_bestrelpathcache", {})
+ try:
+ fspath = cache[location[0]]
+ except KeyError:
+ fspath = self.session.fspath.bestrelpath(location[0])
+ cache[location[0]] = fspath
+ location = (fspath, location[1], str(location[2]))
+ self._location = location
+ return location
+
+class NoMatch(Exception):
+ """ raised if matching cannot locate a matching names. """
+
+class Interrupted(KeyboardInterrupt):
+ """ signals an interrupted test run. """
+ __module__ = 'builtins' # for py3
+
+class Session(FSCollector):
+ Interrupted = Interrupted
+
+ def __init__(self, config):
+ FSCollector.__init__(self, config.rootdir, parent=None,
+ config=config, session=self)
+ self.testsfailed = 0
+ self.testscollected = 0
+ self.shouldstop = False
+ self.trace = config.trace.root.get("collection")
+ self._norecursepatterns = config.getini("norecursedirs")
+ self.startdir = py.path.local()
+ self.config.pluginmanager.register(self, name="session")
+
+ def _makeid(self):
+ return ""
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_collectstart(self):
+ if self.shouldstop:
+ raise self.Interrupted(self.shouldstop)
+
+ @pytest.hookimpl(tryfirst=True)
+ def pytest_runtest_logreport(self, report):
+ if report.failed and not hasattr(report, 'wasxfail'):
+ self.testsfailed += 1
+ maxfail = self.config.getvalue("maxfail")
+ if maxfail and self.testsfailed >= maxfail:
+ self.shouldstop = "stopping after %d failures" % (
+ self.testsfailed)
+ pytest_collectreport = pytest_runtest_logreport
+
+ def isinitpath(self, path):
+ return path in self._initialpaths
+
+ def gethookproxy(self, fspath):
+ # check if we have the common case of running
+ # hooks with all conftest.py files
+ pm = self.config.pluginmanager
+ my_conftestmodules = pm._getconftestmodules(fspath)
+ remove_mods = pm._conftest_plugins.difference(my_conftestmodules)
+ if remove_mods:
+ # one or more conftests are not in use at this fspath
+ proxy = FSHookProxy(fspath, pm, remove_mods)
+ else:
+ # all plugins are active for this fspath
+ proxy = self.config.hook
+ return proxy
+
+ def perform_collect(self, args=None, genitems=True):
+ hook = self.config.hook
+ try:
+ items = self._perform_collect(args, genitems)
+ hook.pytest_collection_modifyitems(session=self,
+ config=self.config, items=items)
+ finally:
+ hook.pytest_collection_finish(session=self)
+ self.testscollected = len(items)
+ return items
+
+ def _perform_collect(self, args, genitems):
+ if args is None:
+ args = self.config.args
+ self.trace("perform_collect", self, args)
+ self.trace.root.indent += 1
+ self._notfound = []
+ self._initialpaths = set()
+ self._initialparts = []
+ self.items = items = []
+ for arg in args:
+ parts = self._parsearg(arg)
+ self._initialparts.append(parts)
+ self._initialpaths.add(parts[0])
+ rep = collect_one_node(self)
+ self.ihook.pytest_collectreport(report=rep)
+ self.trace.root.indent -= 1
+ if self._notfound:
+ errors = []
+ for arg, exc in self._notfound:
+ line = "(no name %r in any of %r)" % (arg, exc.args[0])
+ errors.append("not found: %s\n%s" % (arg, line))
+ #XXX: test this
+ raise pytest.UsageError(*errors)
+ if not genitems:
+ return rep.result
+ else:
+ if rep.passed:
+ for node in rep.result:
+ self.items.extend(self.genitems(node))
+ return items
+
+ def collect(self):
+ for parts in self._initialparts:
+ arg = "::".join(map(str, parts))
+ self.trace("processing argument", arg)
+ self.trace.root.indent += 1
+ try:
+ for x in self._collect(arg):
+ yield x
+ except NoMatch:
+ # we are inside a make_report hook so
+ # we cannot directly pass through the exception
+ self._notfound.append((arg, sys.exc_info()[1]))
+
+ self.trace.root.indent -= 1
+
+ def _collect(self, arg):
+ names = self._parsearg(arg)
+ path = names.pop(0)
+ if path.check(dir=1):
+ assert not names, "invalid arg %r" %(arg,)
+ for path in path.visit(fil=lambda x: x.check(file=1),
+ rec=self._recurse, bf=True, sort=True):
+ for x in self._collectfile(path):
+ yield x
+ else:
+ assert path.check(file=1)
+ for x in self.matchnodes(self._collectfile(path), names):
+ yield x
+
+ def _collectfile(self, path):
+ ihook = self.gethookproxy(path)
+ if not self.isinitpath(path):
+ if ihook.pytest_ignore_collect(path=path, config=self.config):
+ return ()
+ return ihook.pytest_collect_file(path=path, parent=self)
+
+ def _recurse(self, path):
+ ihook = self.gethookproxy(path.dirpath())
+ if ihook.pytest_ignore_collect(path=path, config=self.config):
+ return
+ for pat in self._norecursepatterns:
+ if path.check(fnmatch=pat):
+ return False
+ ihook = self.gethookproxy(path)
+ ihook.pytest_collect_directory(path=path, parent=self)
+ return True
+
+ def _tryconvertpyarg(self, x):
+ """Convert a dotted module name to path.
+
+ """
+ import pkgutil
+ try:
+ loader = pkgutil.find_loader(x)
+ except ImportError:
+ return x
+ if loader is None:
+ return x
+ # This method is sometimes invoked when AssertionRewritingHook, which
+ # does not define a get_filename method, is already in place:
+ try:
+ path = loader.get_filename(x)
+ except AttributeError:
+ # Retrieve path from AssertionRewritingHook:
+ path = loader.modules[x][0].co_filename
+ if loader.is_package(x):
+ path = os.path.dirname(path)
+ return path
+
+ def _parsearg(self, arg):
+ """ return (fspath, names) tuple after checking the file exists. """
+ parts = str(arg).split("::")
+ if self.config.option.pyargs:
+ parts[0] = self._tryconvertpyarg(parts[0])
+ relpath = parts[0].replace("/", os.sep)
+ path = self.config.invocation_dir.join(relpath, abs=True)
+ if not path.check():
+ if self.config.option.pyargs:
+ raise pytest.UsageError("file or package not found: " + arg + " (missing __init__.py?)")
+ else:
+ raise pytest.UsageError("file not found: " + arg)
+ parts[0] = path
+ return parts
+
+ def matchnodes(self, matching, names):
+ self.trace("matchnodes", matching, names)
+ self.trace.root.indent += 1
+ nodes = self._matchnodes(matching, names)
+ num = len(nodes)
+ self.trace("matchnodes finished -> ", num, "nodes")
+ self.trace.root.indent -= 1
+ if num == 0:
+ raise NoMatch(matching, names[:1])
+ return nodes
+
+ def _matchnodes(self, matching, names):
+ if not matching or not names:
+ return matching
+ name = names[0]
+ assert name
+ nextnames = names[1:]
+ resultnodes = []
+ for node in matching:
+ if isinstance(node, pytest.Item):
+ if not names:
+ resultnodes.append(node)
+ continue
+ assert isinstance(node, pytest.Collector)
+ rep = collect_one_node(node)
+ if rep.passed:
+ has_matched = False
+ for x in rep.result:
+ # TODO: remove parametrized workaround once collection structure contains parametrization
+ if x.name == name or x.name.split("[")[0] == name:
+ resultnodes.extend(self.matchnodes([x], nextnames))
+ has_matched = True
+ # XXX accept IDs that don't have "()" for class instances
+ if not has_matched and len(rep.result) == 1 and x.name == "()":
+ nextnames.insert(0, name)
+ resultnodes.extend(self.matchnodes([x], nextnames))
+ node.ihook.pytest_collectreport(report=rep)
+ return resultnodes
+
+ def genitems(self, node):
+ self.trace("genitems", node)
+ if isinstance(node, pytest.Item):
+ node.ihook.pytest_itemcollected(item=node)
+ yield node
+ else:
+ assert isinstance(node, pytest.Collector)
+ rep = collect_one_node(node)
+ if rep.passed:
+ for subnode in rep.result:
+ for x in self.genitems(subnode):
+ yield x
+ node.ihook.pytest_collectreport(report=rep)
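The EXIT_* constants defined at the top of main.py are the values pytest.main() ultimately returns, so a wrapper script can branch on them; a hedged sketch, with a hypothetical "tests/" path:

    # Sketch only: interpreting the exit codes defined in main.py when
    # driving pytest programmatically.
    import pytest
    from _pytest.main import EXIT_OK, EXIT_TESTSFAILED, EXIT_NOTESTSCOLLECTED

    status = pytest.main(["tests/"])          # hypothetical test path
    if status == EXIT_OK:
        print("all tests passed")
    elif status == EXIT_TESTSFAILED:
        print("some tests failed")
    elif status == EXIT_NOTESTSCOLLECTED:
        print("no tests were collected")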
diff --git a/lib/spack/external/_pytest/mark.py b/lib/spack/external/_pytest/mark.py
new file mode 100644
index 0000000000..357a60492e
--- /dev/null
+++ b/lib/spack/external/_pytest/mark.py
@@ -0,0 +1,328 @@
+""" generic mechanism for marking and selecting python functions. """
+import inspect
+
+
+class MarkerError(Exception):
+
+ """Error in use of a pytest marker/attribute."""
+
+
+def pytest_namespace():
+ return {'mark': MarkGenerator()}
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group._addoption(
+ '-k',
+ action="store", dest="keyword", default='', metavar="EXPRESSION",
+ help="only run tests which match the given substring expression. "
+ "An expression is a python evaluatable expression "
+ "where all names are substring-matched against test names "
+ "and their parent classes. Example: -k 'test_method or test_"
+ "other' matches all test functions and classes whose name "
+ "contains 'test_method' or 'test_other'. "
+ "Additionally keywords are matched to classes and functions "
+ "containing extra names in their 'extra_keyword_matches' set, "
+ "as well as functions which have names assigned directly to them."
+ )
+
+ group._addoption(
+ "-m",
+ action="store", dest="markexpr", default="", metavar="MARKEXPR",
+ help="only run tests matching given mark expression. "
+ "example: -m 'mark1 and not mark2'."
+ )
+
+ group.addoption(
+ "--markers", action="store_true",
+ help="show markers (builtin, plugin and per-project ones)."
+ )
+
+ parser.addini("markers", "markers for test functions", 'linelist')
+
+
+def pytest_cmdline_main(config):
+ import _pytest.config
+ if config.option.markers:
+ config._do_configure()
+ tw = _pytest.config.create_terminal_writer(config)
+ for line in config.getini("markers"):
+ name, rest = line.split(":", 1)
+ tw.write("@pytest.mark.%s:" % name, bold=True)
+ tw.line(rest)
+ tw.line()
+ config._ensure_unconfigure()
+ return 0
+
+
+pytest_cmdline_main.tryfirst = True
+
+
+def pytest_collection_modifyitems(items, config):
+ keywordexpr = config.option.keyword.lstrip()
+ matchexpr = config.option.markexpr
+ if not keywordexpr and not matchexpr:
+ return
+ # pytest used to allow "-" for negating
+ # but today we just allow "-" at the beginning, use "not" instead
+ # we will probably remove "-" altogether soon
+ if keywordexpr.startswith("-"):
+ keywordexpr = "not " + keywordexpr[1:]
+ selectuntil = False
+ if keywordexpr[-1:] == ":":
+ selectuntil = True
+ keywordexpr = keywordexpr[:-1]
+
+ remaining = []
+ deselected = []
+ for colitem in items:
+ if keywordexpr and not matchkeyword(colitem, keywordexpr):
+ deselected.append(colitem)
+ else:
+ if selectuntil:
+ keywordexpr = None
+ if matchexpr:
+ if not matchmark(colitem, matchexpr):
+ deselected.append(colitem)
+ continue
+ remaining.append(colitem)
+
+ if deselected:
+ config.hook.pytest_deselected(items=deselected)
+ items[:] = remaining
+
+
+class MarkMapping:
+ """Provides a local mapping for markers where item access
+ resolves to True if the marker is present. """
+ def __init__(self, keywords):
+ mymarks = set()
+ for key, value in keywords.items():
+ if isinstance(value, MarkInfo) or isinstance(value, MarkDecorator):
+ mymarks.add(key)
+ self._mymarks = mymarks
+
+ def __getitem__(self, name):
+ return name in self._mymarks
+
+
+class KeywordMapping:
+ """Provides a local mapping for keywords.
+ Given a list of names, map any substring of one of these names to True.
+ """
+ def __init__(self, names):
+ self._names = names
+
+ def __getitem__(self, subname):
+ for name in self._names:
+ if subname in name:
+ return True
+ return False
+
+
+def matchmark(colitem, markexpr):
+ """Tries to match on any marker names, attached to the given colitem."""
+ return eval(markexpr, {}, MarkMapping(colitem.keywords))
+
+
+def matchkeyword(colitem, keywordexpr):
+ """Tries to match given keyword expression to given collector item.
+
+ Will match on the name of colitem, including the names of its parents.
+ Only matches names of items which are either a :class:`Class` or a
+ :class:`Function`.
+ Additionally, matches on names in the 'extra_keyword_matches' set of
+ any item, as well as names directly assigned to test functions.
+ """
+ mapped_names = set()
+
+ # Add the names of the current item and any parent items
+ import pytest
+ for item in colitem.listchain():
+ if not isinstance(item, pytest.Instance):
+ mapped_names.add(item.name)
+
+ # Add the names added as extra keywords to current or parent items
+ for name in colitem.listextrakeywords():
+ mapped_names.add(name)
+
+ # Add the names attached to the current function through direct assignment
+ if hasattr(colitem, 'function'):
+ for name in colitem.function.__dict__:
+ mapped_names.add(name)
+
+ mapping = KeywordMapping(mapped_names)
+ if " " not in keywordexpr:
+ # special case to allow for simple "-k pass" and "-k 1.3"
+ return mapping[keywordexpr]
+ elif keywordexpr.startswith("not ") and " " not in keywordexpr[4:]:
+ return not mapping[keywordexpr[4:]]
+ return eval(keywordexpr, {}, mapping)
+
+
+def pytest_configure(config):
+ import pytest
+ if config.option.strict:
+ pytest.mark._config = config
+
+
+class MarkGenerator:
+ """ Factory for :class:`MarkDecorator` objects - exposed as
+ a ``pytest.mark`` singleton instance. Example::
+
+ import pytest
+ @pytest.mark.slowtest
+ def test_function():
+ pass
+
+ will set a 'slowtest' :class:`MarkInfo` object
+ on the ``test_function`` object. """
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError("Marker name must NOT start with underscore")
+ if hasattr(self, '_config'):
+ self._check(name)
+ return MarkDecorator(name)
+
+ def _check(self, name):
+ try:
+ if name in self._markers:
+ return
+ except AttributeError:
+ pass
+ self._markers = l = set()
+ for line in self._config.getini("markers"):
+ beginning = line.split(":", 1)
+ x = beginning[0].split("(", 1)[0]
+ l.add(x)
+ if name not in self._markers:
+ raise AttributeError("%r not a registered marker" % (name,))
+
+def istestfunc(func):
+ return hasattr(func, "__call__") and \
+ getattr(func, "__name__", "<lambda>") != "<lambda>"
+
+class MarkDecorator:
+ """ A decorator for test functions and test classes. When applied
+ it will create :class:`MarkInfo` objects which may be
+ :ref:`retrieved by hooks as item keywords <excontrolskip>`.
+ MarkDecorator instances are often created like this::
+
+ mark1 = pytest.mark.NAME # simple MarkDecorator
+ mark2 = pytest.mark.NAME(name1=value) # parametrized MarkDecorator
+
+ and can then be applied as decorators to test functions::
+
+ @mark2
+ def test_function():
+ pass
+
+ When a MarkDecorator instance is called it does the following:
+ 1. If called with a single class as its only positional argument and no
+ additional keyword arguments, it attaches itself to the class so it
+ gets applied automatically to all test cases found in that class.
+ 2. If called with a single function as its only positional argument and
+ no additional keyword arguments, it attaches a MarkInfo object to the
+ function, containing all the arguments already stored internally in
+ the MarkDecorator.
+ 3. When called in any other case, it performs a 'fake construction' call,
+ i.e. it returns a new MarkDecorator instance with the original
+ MarkDecorator's content updated with the arguments passed to this
+ call.
+
+ Note: The rules above prevent MarkDecorator objects from storing only a
+ single function or class reference as their positional argument with no
+ additional keyword or positional arguments.
+
+ """
+ def __init__(self, name, args=None, kwargs=None):
+ self.name = name
+ self.args = args or ()
+ self.kwargs = kwargs or {}
+
+ @property
+ def markname(self):
+ return self.name # for backward-compat (2.4.1 had this attr)
+
+ def __repr__(self):
+ d = self.__dict__.copy()
+ name = d.pop('name')
+ return "<MarkDecorator %r %r>" % (name, d)
+
+ def __call__(self, *args, **kwargs):
+ """ if passed a single callable argument: decorate it with mark info.
+ otherwise add *args/**kwargs in-place to mark information. """
+ if args and not kwargs:
+ func = args[0]
+ is_class = inspect.isclass(func)
+ if len(args) == 1 and (istestfunc(func) or is_class):
+ if is_class:
+ if hasattr(func, 'pytestmark'):
+ mark_list = func.pytestmark
+ if not isinstance(mark_list, list):
+ mark_list = [mark_list]
+ # always work on a copy to avoid updating pytestmark
+ # from a superclass by accident
+ mark_list = mark_list + [self]
+ func.pytestmark = mark_list
+ else:
+ func.pytestmark = [self]
+ else:
+ holder = getattr(func, self.name, None)
+ if holder is None:
+ holder = MarkInfo(
+ self.name, self.args, self.kwargs
+ )
+ setattr(func, self.name, holder)
+ else:
+ holder.add(self.args, self.kwargs)
+ return func
+ kw = self.kwargs.copy()
+ kw.update(kwargs)
+ args = self.args + args
+ return self.__class__(self.name, args=args, kwargs=kw)
+
+
+def extract_argvalue(maybe_marked_args):
+ # TODO: incorrect mark data, the old code wasn't able to collect lists
+ # individual parametrized argument sets can be wrapped in a series
+ # of markers in which case we unwrap the values and apply the mark
+ # at Function init
+ newmarks = {}
+ argval = maybe_marked_args
+ while isinstance(argval, MarkDecorator):
+ newmark = MarkDecorator(argval.markname,
+ argval.args[:-1], argval.kwargs)
+ newmarks[newmark.markname] = newmark
+ argval = argval.args[-1]
+ return argval, newmarks
+
+
+class MarkInfo:
+ """ Marking object created by :class:`MarkDecorator` instances. """
+ def __init__(self, name, args, kwargs):
+ #: name of attribute
+ self.name = name
+ #: positional argument list, empty if none specified
+ self.args = args
+ #: keyword argument dictionary, empty if nothing specified
+ self.kwargs = kwargs.copy()
+ self._arglist = [(args, kwargs.copy())]
+
+ def __repr__(self):
+ return "<MarkInfo %r args=%r kwargs=%r>" % (
+ self.name, self.args, self.kwargs
+ )
+
+ def add(self, args, kwargs):
+ """ add a MarkInfo with the given args and kwargs. """
+ self._arglist.append((args, kwargs))
+ self.args += args
+ self.kwargs.update(kwargs)
+
+ def __iter__(self):
+ """ yield MarkInfo objects each relating to a marking-call. """
+ for args, kwargs in self._arglist:
+ yield MarkInfo(self.name, args, kwargs)
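As the MarkGenerator and MarkDecorator docstrings describe, marks are created by attribute access on pytest.mark, applied as decorators, and then selected via the -m/-k options added above; a brief sketch in which the "slow" mark and the test names are made up:

    # Sketch of the mark API implemented in mark.py; names are hypothetical.
    import pytest

    @pytest.mark.slow                    # MarkGenerator.__getattr__ -> MarkDecorator
    def test_big_matrix():
        pass

    @pytest.mark.slow(reason="network")  # parametrized MarkDecorator (case 3 above)
    def test_long_io():
        pass

    # Command-line selection goes through matchmark()/matchkeyword():
    #   pytest -m slow                 run only tests carrying the "slow" mark
    #   pytest -k "matrix or io"       keyword (substring) expression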
diff --git a/lib/spack/external/_pytest/monkeypatch.py b/lib/spack/external/_pytest/monkeypatch.py
new file mode 100644
index 0000000000..852e72beda
--- /dev/null
+++ b/lib/spack/external/_pytest/monkeypatch.py
@@ -0,0 +1,258 @@
+""" monkeypatching and mocking functionality. """
+
+import os, sys
+import re
+
+from py.builtin import _basestring
+
+import pytest
+
+RE_IMPORT_ERROR_NAME = re.compile("^No module named (.*)$")
+
+
+@pytest.fixture
+def monkeypatch(request):
+ """The returned ``monkeypatch`` fixture provides these
+ helper methods to modify objects, dictionaries or os.environ::
+
+ monkeypatch.setattr(obj, name, value, raising=True)
+ monkeypatch.delattr(obj, name, raising=True)
+ monkeypatch.setitem(mapping, name, value)
+ monkeypatch.delitem(obj, name, raising=True)
+ monkeypatch.setenv(name, value, prepend=False)
+ monkeypatch.delenv(name, raising=True)
+ monkeypatch.syspath_prepend(path)
+ monkeypatch.chdir(path)
+
+ All modifications will be undone after the requesting
+ test function or fixture has finished. The ``raising``
+ parameter determines if a KeyError or AttributeError
+ will be raised if the set/deletion operation has no target.
+ """
+ mpatch = MonkeyPatch()
+ request.addfinalizer(mpatch.undo)
+ return mpatch
+
+
+def resolve(name):
+ # simplified from zope.dottedname
+ parts = name.split('.')
+
+ used = parts.pop(0)
+ found = __import__(used)
+ for part in parts:
+ used += '.' + part
+ try:
+ found = getattr(found, part)
+ except AttributeError:
+ pass
+ else:
+ continue
+ # we use explicit un-nesting of the handling block in order
+ # to avoid nested exceptions on python 3
+ try:
+ __import__(used)
+ except ImportError as ex:
+ # str is used for py2 vs py3
+ expected = str(ex).split()[-1]
+ if expected == used:
+ raise
+ else:
+ raise ImportError(
+ 'import error in %s: %s' % (used, ex)
+ )
+ found = annotated_getattr(found, part, used)
+ return found
+
+
+def annotated_getattr(obj, name, ann):
+ try:
+ obj = getattr(obj, name)
+ except AttributeError:
+ raise AttributeError(
+ '%r object at %s has no attribute %r' % (
+ type(obj).__name__, ann, name
+ )
+ )
+ return obj
+
+
+def derive_importpath(import_path, raising):
+ if not isinstance(import_path, _basestring) or "." not in import_path:
+ raise TypeError("must be absolute import path string, not %r" %
+ (import_path,))
+ module, attr = import_path.rsplit('.', 1)
+ target = resolve(module)
+ if raising:
+ annotated_getattr(target, attr, ann=module)
+ return attr, target
+
+
+class Notset:
+ def __repr__(self):
+ return "<notset>"
+
+
+notset = Notset()
+
+
+class MonkeyPatch:
+ """ Object returned by the ``monkeypatch`` fixture keeping a record of setattr/item/env/syspath changes.
+ """
+
+ def __init__(self):
+ self._setattr = []
+ self._setitem = []
+ self._cwd = None
+ self._savesyspath = None
+
+ def setattr(self, target, name, value=notset, raising=True):
+ """ Set attribute value on target, memorizing the old value.
+ By default raise AttributeError if the attribute did not exist.
+
+ For convenience you can specify a string as ``target`` which
+ will be interpreted as a dotted import path, with the last part
+ being the attribute name. Example:
+ ``monkeypatch.setattr("os.getcwd", lambda: "/")``
+ would set the ``getcwd`` function of the ``os`` module.
+
+ The ``raising`` value determines if the setattr should fail
+ if the attribute is not already present (defaults to True
+ which means it will raise).
+ """
+ __tracebackhide__ = True
+ import inspect
+
+ if value is notset:
+ if not isinstance(target, _basestring):
+ raise TypeError("use setattr(target, name, value) or "
+ "setattr(target, value) with target being a dotted "
+ "import string")
+ value = name
+ name, target = derive_importpath(target, raising)
+
+ oldval = getattr(target, name, notset)
+ if raising and oldval is notset:
+ raise AttributeError("%r has no attribute %r" % (target, name))
+
+ # avoid class descriptors like staticmethod/classmethod
+ if inspect.isclass(target):
+ oldval = target.__dict__.get(name, notset)
+ self._setattr.append((target, name, oldval))
+ setattr(target, name, value)
+
+ def delattr(self, target, name=notset, raising=True):
+ """ Delete attribute ``name`` from ``target``, by default raise
+ AttributeError if the attribute did not previously exist.
+
+ If no ``name`` is specified and ``target`` is a string
+ it will be interpreted as a dotted import path with the
+ last part being the attribute name.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ attribute is missing.
+ """
+ __tracebackhide__ = True
+ if name is notset:
+ if not isinstance(target, _basestring):
+ raise TypeError("use delattr(target, name) or "
+ "delattr(target) with target being a dotted "
+ "import string")
+ name, target = derive_importpath(target, raising)
+
+ if not hasattr(target, name):
+ if raising:
+ raise AttributeError(name)
+ else:
+ self._setattr.append((target, name, getattr(target, name, notset)))
+ delattr(target, name)
+
+ def setitem(self, dic, name, value):
+ """ Set dictionary entry ``name`` to value. """
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ dic[name] = value
+
+ def delitem(self, dic, name, raising=True):
+ """ Delete ``name`` from dict. Raise KeyError if it doesn't exist.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ key is missing.
+ """
+ if name not in dic:
+ if raising:
+ raise KeyError(name)
+ else:
+ self._setitem.append((dic, name, dic.get(name, notset)))
+ del dic[name]
+
+ def setenv(self, name, value, prepend=None):
+ """ Set environment variable ``name`` to ``value``. If ``prepend``
+ is a character, read the current environment variable value
+ and prepend the ``value`` adjoined with the ``prepend`` character."""
+ value = str(value)
+ if prepend and name in os.environ:
+ value = value + prepend + os.environ[name]
+ self.setitem(os.environ, name, value)
+
+ def delenv(self, name, raising=True):
+ """ Delete ``name`` from the environment. Raise KeyError it does not
+ exist.
+
+ If ``raising`` is set to False, no exception will be raised if the
+ environment variable is missing.
+ """
+ self.delitem(os.environ, name, raising=raising)
+
+ def syspath_prepend(self, path):
+ """ Prepend ``path`` to ``sys.path`` list of import locations. """
+ if self._savesyspath is None:
+ self._savesyspath = sys.path[:]
+ sys.path.insert(0, str(path))
+
+ def chdir(self, path):
+ """ Change the current working directory to the specified path.
+ Path can be a string or a py.path.local object.
+ """
+ if self._cwd is None:
+ self._cwd = os.getcwd()
+ if hasattr(path, "chdir"):
+ path.chdir()
+ else:
+ os.chdir(path)
+
+ def undo(self):
+ """ Undo previous changes. This call consumes the
+ undo stack. Calling it a second time has no effect unless
+ you do more monkeypatching after the undo call.
+
+ There is generally no need to call `undo()`, since it is
+ called automatically during tear-down.
+
+ Note that the same `monkeypatch` fixture is used across a
+ single test function invocation. If `monkeypatch` is used both by
+ the test function itself and one of the test fixtures,
+ calling `undo()` will undo all of the changes made in
+ both functions.
+ """
+ for obj, name, value in reversed(self._setattr):
+ if value is not notset:
+ setattr(obj, name, value)
+ else:
+ delattr(obj, name)
+ self._setattr[:] = []
+ for dictionary, name, value in reversed(self._setitem):
+ if value is notset:
+ try:
+ del dictionary[name]
+ except KeyError:
+ pass # was already deleted, so we have the desired state
+ else:
+ dictionary[name] = value
+ self._setitem[:] = []
+ if self._savesyspath is not None:
+ sys.path[:] = self._savesyspath
+ self._savesyspath = None
+
+ if self._cwd is not None:
+ os.chdir(self._cwd)
+ self._cwd = None
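A typical use of the monkeypatch fixture defined above, as a hedged sketch; the patched values are invented for illustration:

    # Sketch only: every change below is reverted by MonkeyPatch.undo() at teardown.
    import os

    def test_fake_cwd(monkeypatch):
        # dotted-string target resolved by derive_importpath()
        monkeypatch.setattr("os.getcwd", lambda: "/tmp/fake")
        monkeypatch.setenv("HOME", "/tmp/fake")
        monkeypatch.delenv("EDITOR", raising=False)
        assert os.getcwd() == "/tmp/fake"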
diff --git a/lib/spack/external/_pytest/nose.py b/lib/spack/external/_pytest/nose.py
new file mode 100644
index 0000000000..0387468686
--- /dev/null
+++ b/lib/spack/external/_pytest/nose.py
@@ -0,0 +1,71 @@
+""" run test suites written for nose. """
+
+import sys
+
+import py
+import pytest
+from _pytest import unittest
+
+
+def get_skip_exceptions():
+ skip_classes = set()
+ for module_name in ('unittest', 'unittest2', 'nose'):
+ mod = sys.modules.get(module_name)
+ if hasattr(mod, 'SkipTest'):
+ skip_classes.add(mod.SkipTest)
+ return tuple(skip_classes)
+
+
+def pytest_runtest_makereport(item, call):
+ if call.excinfo and call.excinfo.errisinstance(get_skip_exceptions()):
+ # let's substitute the excinfo with a pytest.skip one
+ call2 = call.__class__(lambda:
+ pytest.skip(str(call.excinfo.value)), call.when)
+ call.excinfo = call2.excinfo
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_runtest_setup(item):
+ if is_potential_nosetest(item):
+ if isinstance(item.parent, pytest.Generator):
+ gen = item.parent
+ if not hasattr(gen, '_nosegensetup'):
+ call_optional(gen.obj, 'setup')
+ if isinstance(gen.parent, pytest.Instance):
+ call_optional(gen.parent.obj, 'setup')
+ gen._nosegensetup = True
+ if not call_optional(item.obj, 'setup'):
+ # call module level setup if there is no object level one
+ call_optional(item.parent.obj, 'setup')
+ #XXX this implies we only call teardown when setup worked
+ item.session._setupstate.addfinalizer((lambda: teardown_nose(item)), item)
+
+def teardown_nose(item):
+ if is_potential_nosetest(item):
+ if not call_optional(item.obj, 'teardown'):
+ call_optional(item.parent.obj, 'teardown')
+ #if hasattr(item.parent, '_nosegensetup'):
+ # #call_optional(item._nosegensetup, 'teardown')
+ # del item.parent._nosegensetup
+
+
+def pytest_make_collect_report(collector):
+ if isinstance(collector, pytest.Generator):
+ call_optional(collector.obj, 'setup')
+
+
+def is_potential_nosetest(item):
+ # extra check needed since we do not do nose style setup/teardown
+ # on direct unittest style classes
+ return isinstance(item, pytest.Function) and \
+ not isinstance(item, unittest.TestCaseFunction)
+
+
+def call_optional(obj, name):
+ method = getattr(obj, name, None)
+ isfixture = hasattr(method, "_pytestfixturefunction")
+ if method is not None and not isfixture and py.builtin.callable(method):
+ # If there's any problems allow the exception to raise rather than
+ # silently ignoring them
+ method()
+ return True
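Because get_skip_exceptions() also picks up unittest's SkipTest, a nose/unittest-style raise is rewritten into an ordinary pytest skip by pytest_runtest_makereport(); a small sketch:

    # Sketch only: this exception is converted into a pytest skip by the hook above.
    import unittest

    def test_requires_feature():
        raise unittest.SkipTest("feature not available on this platform")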
diff --git a/lib/spack/external/_pytest/pastebin.py b/lib/spack/external/_pytest/pastebin.py
new file mode 100644
index 0000000000..9f1cf90637
--- /dev/null
+++ b/lib/spack/external/_pytest/pastebin.py
@@ -0,0 +1,98 @@
+""" submit failure or test session information to a pastebin service. """
+import pytest
+import sys
+import tempfile
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting")
+ group._addoption('--pastebin', metavar="mode",
+ action='store', dest="pastebin", default=None,
+ choices=['failed', 'all'],
+ help="send failed|all info to bpaste.net pastebin service.")
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_configure(config):
+ import py
+ if config.option.pastebin == "all":
+ tr = config.pluginmanager.getplugin('terminalreporter')
+ # if no terminal reporter plugin is present, nothing we can do here;
+ # this can happen when this function executes in a slave node
+ # when using pytest-xdist, for example
+ if tr is not None:
+ # pastebin file will be utf-8 encoded binary file
+ config._pastebinfile = tempfile.TemporaryFile('w+b')
+ oldwrite = tr._tw.write
+
+ def tee_write(s, **kwargs):
+ oldwrite(s, **kwargs)
+ if py.builtin._istext(s):
+ s = s.encode('utf-8')
+ config._pastebinfile.write(s)
+
+ tr._tw.write = tee_write
+
+
+def pytest_unconfigure(config):
+ if hasattr(config, '_pastebinfile'):
+ # get terminal contents and delete file
+ config._pastebinfile.seek(0)
+ sessionlog = config._pastebinfile.read()
+ config._pastebinfile.close()
+ del config._pastebinfile
+ # undo our patching in the terminal reporter
+ tr = config.pluginmanager.getplugin('terminalreporter')
+ del tr._tw.__dict__['write']
+ # write summary
+ tr.write_sep("=", "Sending information to Paste Service")
+ pastebinurl = create_new_paste(sessionlog)
+ tr.write_line("pastebin session-log: %s\n" % pastebinurl)
+
+
+def create_new_paste(contents):
+ """
+ Creates a new paste using bpaste.net service.
+
+ :contents: paste contents as utf-8 encoded bytes
+ :returns: url to the pasted contents
+ """
+ import re
+ if sys.version_info < (3, 0):
+ from urllib import urlopen, urlencode
+ else:
+ from urllib.request import urlopen
+ from urllib.parse import urlencode
+
+ params = {
+ 'code': contents,
+ 'lexer': 'python3' if sys.version_info[0] == 3 else 'python',
+ 'expiry': '1week',
+ }
+ url = 'https://bpaste.net'
+ response = urlopen(url, data=urlencode(params).encode('ascii')).read()
+ m = re.search(r'href="/raw/(\w+)"', response.decode('utf-8'))
+ if m:
+ return '%s/show/%s' % (url, m.group(1))
+ else:
+ return 'bad response: ' + response
+
+
+def pytest_terminal_summary(terminalreporter):
+ import _pytest.config
+ if terminalreporter.config.option.pastebin != "failed":
+ return
+ tr = terminalreporter
+ if 'failed' in tr.stats:
+ terminalreporter.write_sep("=", "Sending information to Paste Service")
+ for rep in terminalreporter.stats.get('failed'):
+ try:
+ msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc
+ except AttributeError:
+ msg = tr._getfailureheadline(rep)
+ tw = _pytest.config.create_terminal_writer(terminalreporter.config, stringio=True)
+ rep.toterminal(tw)
+ s = tw.stringio.getvalue()
+ assert len(s)
+ pastebinurl = create_new_paste(s)
+ tr.write_line("%s --> %s" %(msg, pastebinurl))
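The two pastebin modes wired up above differ in when they paste: "all" tees the whole terminal session into a temporary file at configure time, while "failed" pastes each failure's traceback during the terminal summary. A hedged sketch of invoking it, with a hypothetical test path:

    # Sketch only: exercising the --pastebin option registered above.
    import pytest

    pytest.main(["--pastebin=failed", "tests/"])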
diff --git a/lib/spack/external/_pytest/pytester.py b/lib/spack/external/_pytest/pytester.py
new file mode 100644
index 0000000000..17ff529a6c
--- /dev/null
+++ b/lib/spack/external/_pytest/pytester.py
@@ -0,0 +1,1139 @@
+""" (disabled by default) support for testing pytest and pytest plugins. """
+import codecs
+import gc
+import os
+import platform
+import re
+import subprocess
+import sys
+import time
+import traceback
+from fnmatch import fnmatch
+
+from py.builtin import print_
+
+from _pytest._code import Source
+import py
+import pytest
+from _pytest.main import Session, EXIT_OK
+from _pytest.assertion.rewrite import AssertionRewritingHook
+
+
+def pytest_addoption(parser):
+ # group = parser.getgroup("pytester", "pytester (self-tests) options")
+ parser.addoption('--lsof',
+ action="store_true", dest="lsof", default=False,
+ help=("run FD checks if lsof is available"))
+
+ parser.addoption('--runpytest', default="inprocess", dest="runpytest",
+ choices=("inprocess", "subprocess", ),
+ help=("run pytest sub runs in tests using an 'inprocess' "
+ "or 'subprocess' (python -m main) method"))
+
+
+def pytest_configure(config):
+ # This might be called multiple times. Only take the first.
+ global _pytest_fullpath
+ try:
+ _pytest_fullpath
+ except NameError:
+ _pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc"))
+ _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py")
+
+ if config.getvalue("lsof"):
+ checker = LsofFdLeakChecker()
+ if checker.matching_platform():
+ config.pluginmanager.register(checker)
+
+
+class LsofFdLeakChecker(object):
+ def get_open_files(self):
+ out = self._exec_lsof()
+ open_files = self._parse_lsof_output(out)
+ return open_files
+
+ def _exec_lsof(self):
+ pid = os.getpid()
+ return py.process.cmdexec("lsof -Ffn0 -p %d" % pid)
+
+ def _parse_lsof_output(self, out):
+ def isopen(line):
+ return line.startswith('f') and ("deleted" not in line and
+ 'mem' not in line and "txt" not in line and 'cwd' not in line)
+
+ open_files = []
+
+ for line in out.split("\n"):
+ if isopen(line):
+ fields = line.split('\0')
+ fd = fields[0][1:]
+ filename = fields[1][1:]
+ if filename.startswith('/'):
+ open_files.append((fd, filename))
+
+ return open_files
+
+ def matching_platform(self):
+ try:
+ py.process.cmdexec("lsof -v")
+ except (py.process.cmdexec.Error, UnicodeDecodeError):
+ # cmdexec may raise UnicodeDecodeError on Windows systems
+ # with locale other than english:
+ # https://bitbucket.org/pytest-dev/py/issues/66
+ return False
+ else:
+ return True
+
+ @pytest.hookimpl(hookwrapper=True, tryfirst=True)
+ def pytest_runtest_item(self, item):
+ lines1 = self.get_open_files()
+ yield
+ if hasattr(sys, "pypy_version_info"):
+ gc.collect()
+ lines2 = self.get_open_files()
+
+ new_fds = set([t[0] for t in lines2]) - set([t[0] for t in lines1])
+ leaked_files = [t for t in lines2 if t[0] in new_fds]
+ if leaked_files:
+ error = []
+ error.append("***** %s FD leakage detected" % len(leaked_files))
+ error.extend([str(f) for f in leaked_files])
+ error.append("*** Before:")
+ error.extend([str(f) for f in lines1])
+ error.append("*** After:")
+ error.extend([str(f) for f in lines2])
+ error.append(error[0])
+ error.append("*** function %s:%s: %s " % item.location)
+ pytest.fail("\n".join(error), pytrace=False)
+
+
+# XXX copied from execnet's conftest.py - needs to be merged
+winpymap = {
+ 'python2.7': r'C:\Python27\python.exe',
+ 'python2.6': r'C:\Python26\python.exe',
+ 'python3.1': r'C:\Python31\python.exe',
+ 'python3.2': r'C:\Python32\python.exe',
+ 'python3.3': r'C:\Python33\python.exe',
+ 'python3.4': r'C:\Python34\python.exe',
+ 'python3.5': r'C:\Python35\python.exe',
+}
+
+def getexecutable(name, cache={}):
+ try:
+ return cache[name]
+ except KeyError:
+ executable = py.path.local.sysfind(name)
+ if executable:
+ import subprocess
+ popen = subprocess.Popen([str(executable), "--version"],
+ universal_newlines=True, stderr=subprocess.PIPE)
+ out, err = popen.communicate()
+ if name == "jython":
+ if not err or "2.5" not in err:
+ executable = None
+ if "2.5.2" in err:
+ executable = None # http://bugs.jython.org/issue1790
+ elif popen.returncode != 0:
+ # Handle pyenv's 127.
+ executable = None
+ cache[name] = executable
+ return executable
+
+@pytest.fixture(params=['python2.6', 'python2.7', 'python3.3', "python3.4",
+ 'pypy', 'pypy3'])
+def anypython(request):
+ name = request.param
+ executable = getexecutable(name)
+ if executable is None:
+ if sys.platform == "win32":
+ executable = winpymap.get(name, None)
+ if executable:
+ executable = py.path.local(executable)
+ if executable.check():
+ return executable
+ pytest.skip("no suitable %s found" % (name,))
+ return executable
+
+# used at least by pytest-xdist plugin
+@pytest.fixture
+def _pytest(request):
+ """ Return a helper which offers a gethookrecorder(hook)
+ method which returns a HookRecorder instance which helps
+ to make assertions about called hooks.
+ """
+ return PytestArg(request)
+
+class PytestArg:
+ def __init__(self, request):
+ self.request = request
+
+ def gethookrecorder(self, hook):
+ hookrecorder = HookRecorder(hook._pm)
+ self.request.addfinalizer(hookrecorder.finish_recording)
+ return hookrecorder
+
+
+def get_public_names(l):
+ """Only return names from iterator l without a leading underscore."""
+ return [x for x in l if x[0] != "_"]
+
+
+class ParsedCall:
+ def __init__(self, name, kwargs):
+ self.__dict__.update(kwargs)
+ self._name = name
+
+ def __repr__(self):
+ d = self.__dict__.copy()
+ del d['_name']
+ return "<ParsedCall %r(**%r)>" %(self._name, d)
+
+
+class HookRecorder:
+ """Record all hooks called in a plugin manager.
+
+ This wraps all the hook calls in the plugin manager, recording
+ each call before propagating the normal calls.
+
+ """
+
+ def __init__(self, pluginmanager):
+ self._pluginmanager = pluginmanager
+ self.calls = []
+
+ def before(hook_name, hook_impls, kwargs):
+ self.calls.append(ParsedCall(hook_name, kwargs))
+
+ def after(outcome, hook_name, hook_impls, kwargs):
+ pass
+
+ self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after)
+
+ def finish_recording(self):
+ self._undo_wrapping()
+
+ def getcalls(self, names):
+ if isinstance(names, str):
+ names = names.split()
+ return [call for call in self.calls if call._name in names]
+
+ def assert_contains(self, entries):
+ __tracebackhide__ = True
+ i = 0
+ entries = list(entries)
+ backlocals = sys._getframe(1).f_locals
+ while entries:
+ name, check = entries.pop(0)
+ for ind, call in enumerate(self.calls[i:]):
+ if call._name == name:
+ print_("NAMEMATCH", name, call)
+ if eval(check, backlocals, call.__dict__):
+ print_("CHECKERMATCH", repr(check), "->", call)
+ else:
+ print_("NOCHECKERMATCH", repr(check), "-", call)
+ continue
+ i += ind + 1
+ break
+ print_("NONAMEMATCH", name, "with", call)
+ else:
+ pytest.fail("could not find %r check %r" % (name, check))
+
+ def popcall(self, name):
+ __tracebackhide__ = True
+ for i, call in enumerate(self.calls):
+ if call._name == name:
+ del self.calls[i]
+ return call
+ lines = ["could not find call %r, in:" % (name,)]
+ lines.extend([" %s" % str(x) for x in self.calls])
+ pytest.fail("\n".join(lines))
+
+ def getcall(self, name):
+ l = self.getcalls(name)
+ assert len(l) == 1, (name, l)
+ return l[0]
+
+ # functionality for test reports
+
+ def getreports(self,
+ names="pytest_runtest_logreport pytest_collectreport"):
+ return [x.report for x in self.getcalls(names)]
+
+ def matchreport(self, inamepart="",
+ names="pytest_runtest_logreport pytest_collectreport", when=None):
+ """ return a testreport whose dotted import path matches """
+ l = []
+ for rep in self.getreports(names=names):
+ try:
+ if not when and rep.when != "call" and rep.passed:
+ # setup/teardown passing reports - let's ignore those
+ continue
+ except AttributeError:
+ pass
+ if when and getattr(rep, 'when', None) != when:
+ continue
+ if not inamepart or inamepart in rep.nodeid.split("::"):
+ l.append(rep)
+ if not l:
+ raise ValueError("could not find test report matching %r: "
+ "no test reports at all!" % (inamepart,))
+ if len(l) > 1:
+ raise ValueError(
+ "found 2 or more testreports matching %r: %s" %(inamepart, l))
+ return l[0]
+
+ def getfailures(self,
+ names='pytest_runtest_logreport pytest_collectreport'):
+ return [rep for rep in self.getreports(names) if rep.failed]
+
+ def getfailedcollections(self):
+ return self.getfailures('pytest_collectreport')
+
+ def listoutcomes(self):
+ passed = []
+ skipped = []
+ failed = []
+ for rep in self.getreports(
+ "pytest_collectreport pytest_runtest_logreport"):
+ if rep.passed:
+ if getattr(rep, "when", None) == "call":
+ passed.append(rep)
+ elif rep.skipped:
+ skipped.append(rep)
+ elif rep.failed:
+ failed.append(rep)
+ return passed, skipped, failed
+
+ def countoutcomes(self):
+ return [len(x) for x in self.listoutcomes()]
+
+ def assertoutcome(self, passed=0, skipped=0, failed=0):
+ realpassed, realskipped, realfailed = self.listoutcomes()
+ assert passed == len(realpassed)
+ assert skipped == len(realskipped)
+ assert failed == len(realfailed)
+
+ def clear(self):
+ self.calls[:] = []
+
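+# Minimal usage sketch for HookRecorder above, assuming a configured pytest
+# ``config`` object (e.g. obtained via Testdir.parseconfigure below):
+#
+#     rec = HookRecorder(config.pluginmanager)
+#     # ... trigger a collection or test run ...
+#     rec.assertoutcome(passed=1)     # counts passed/skipped/failed reports
+#     rec.finish_recording()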
+
+@pytest.fixture
+def linecomp(request):
+ return LineComp()
+
+
+@pytest.fixture(name='LineMatcher')
+def LineMatcher_fixture(request):
+ return LineMatcher
+
+
+@pytest.fixture
+def testdir(request, tmpdir_factory):
+ return Testdir(request, tmpdir_factory)
+
+
+rex_outcome = re.compile(r"(\d+) ([\w-]+)")
+
+class RunResult:
+ """The result of running a command.
+
+ Attributes:
+
+ :ret: The return value.
+ :outlines: List of lines captured from stdout.
+ :errlines: List of lines captured from stderr.
+ :stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
+ reconstruct stdout, or the commonly used
+ ``stdout.fnmatch_lines()`` method.
+ :stderr: :py:class:`LineMatcher` of stderr.
+ :duration: Duration in seconds.
+
+ """
+ def __init__(self, ret, outlines, errlines, duration):
+ self.ret = ret
+ self.outlines = outlines
+ self.errlines = errlines
+ self.stdout = LineMatcher(outlines)
+ self.stderr = LineMatcher(errlines)
+ self.duration = duration
+
+ def parseoutcomes(self):
+ """ Return a dictionary of outcomestring->num from parsing
+ the terminal output that the test process produced."""
+ for line in reversed(self.outlines):
+ if 'seconds' in line:
+ outcomes = rex_outcome.findall(line)
+ if outcomes:
+ d = {}
+ for num, cat in outcomes:
+ d[cat] = int(num)
+ return d
+
+ def assert_outcomes(self, passed=0, skipped=0, failed=0):
+ """ assert that the specified outcomes appear with the respective
+ numbers (0 means it didn't occur) in the text output from a test run."""
+ d = self.parseoutcomes()
+ assert passed == d.get("passed", 0)
+ assert skipped == d.get("skipped", 0)
+ assert failed == d.get("failed", 0)
+
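+# Minimal usage sketch for RunResult, assuming the ``testdir`` fixture from
+# this module (see Testdir below); the test body is illustrative:
+#
+#     def test_outcomes(testdir):
+#         testdir.makepyfile("def test_ok(): pass")
+#         result = testdir.runpytest()
+#         result.assert_outcomes(passed=1)
+#         assert result.parseoutcomes().get("passed") == 1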
+
+
+class Testdir:
+ """Temporary test directory with tools to test/run pytest itself.
+
+ This is based on the ``tmpdir`` fixture but provides a number of
+ methods which aid with testing pytest itself. Unless
+ :py:meth:`chdir` is used, all methods will use :py:attr:`tmpdir` as
+ the current working directory.
+
+ Attributes:
+
+ :tmpdir: The :py:class:`py.path.local` instance of the temporary
+ directory.
+
+ :plugins: A list of plugins to use with :py:meth:`parseconfig` and
+ :py:meth:`runpytest`. Initially this is an empty list but
+ plugins can be added to the list. The type of items to add to
+ the list depends on the method which uses them, so refer to those
+ methods for details.
+
+ """
+
+ def __init__(self, request, tmpdir_factory):
+ self.request = request
+ # XXX remove duplication with tmpdir plugin
+ basetmp = tmpdir_factory.ensuretemp("testdir")
+ name = request.function.__name__
+ for i in range(100):
+ try:
+ tmpdir = basetmp.mkdir(name + str(i))
+ except py.error.EEXIST:
+ continue
+ break
+ self.tmpdir = tmpdir
+ self.plugins = []
+ self._savesyspath = (list(sys.path), list(sys.meta_path))
+ self._savemodulekeys = set(sys.modules)
+ self.chdir() # always chdir
+ self.request.addfinalizer(self.finalize)
+ method = self.request.config.getoption("--runpytest")
+ if method == "inprocess":
+ self._runpytest_method = self.runpytest_inprocess
+ elif method == "subprocess":
+ self._runpytest_method = self.runpytest_subprocess
+
+ def __repr__(self):
+ return "<Testdir %r>" % (self.tmpdir,)
+
+ def finalize(self):
+ """Clean up global state artifacts.
+
+ Some methods modify the global interpreter state and this
+ tries to clean this up. It does not remove the temporary
+ directory however so it can be looked at after the test run
+ has finished.
+
+ """
+ sys.path[:], sys.meta_path[:] = self._savesyspath
+ if hasattr(self, '_olddir'):
+ self._olddir.chdir()
+ self.delete_loaded_modules()
+
+ def delete_loaded_modules(self):
+ """Delete modules that have been loaded during a test.
+
+ This allows the interpreter to catch module changes in case
+ the module is re-imported.
+ """
+ for name in set(sys.modules).difference(self._savemodulekeys):
+ # it seems zope.interfaces is keeping some state
+ # (used by twisted related tests)
+ if name != "zope.interface":
+ del sys.modules[name]
+
+ def make_hook_recorder(self, pluginmanager):
+ """Create a new :py:class:`HookRecorder` for a PluginManager."""
+ assert not hasattr(pluginmanager, "reprec")
+ pluginmanager.reprec = reprec = HookRecorder(pluginmanager)
+ self.request.addfinalizer(reprec.finish_recording)
+ return reprec
+
+ def chdir(self):
+ """Cd into the temporary directory.
+
+ This is done automatically upon instantiation.
+
+ """
+ old = self.tmpdir.chdir()
+ if not hasattr(self, '_olddir'):
+ self._olddir = old
+
+ def _makefile(self, ext, args, kwargs):
+ items = list(kwargs.items())
+ if args:
+ source = py.builtin._totext("\n").join(
+ map(py.builtin._totext, args)) + py.builtin._totext("\n")
+ basename = self.request.function.__name__
+ items.insert(0, (basename, source))
+ ret = None
+ for name, value in items:
+ p = self.tmpdir.join(name).new(ext=ext)
+ p.dirpath().ensure_dir()
+ source = Source(value)
+
+ def my_totext(s, encoding="utf-8"):
+ if py.builtin._isbytes(s):
+ s = py.builtin._totext(s, encoding=encoding)
+ return s
+
+ source_unicode = "\n".join([my_totext(line) for line in source.lines])
+ source = py.builtin._totext(source_unicode)
+ content = source.strip().encode("utf-8") # + "\n"
+ #content = content.rstrip() + "\n"
+ p.write(content, "wb")
+ if ret is None:
+ ret = p
+ return ret
+
+ def makefile(self, ext, *args, **kwargs):
+ """Create a new file in the testdir.
+
+ ext: The extension the file should use, including the dot.
+ E.g. ".py".
+
+ args: All args will be treated as strings and joined using
+ newlines. The result will be written as contents to the
+ file. The name of the file will be based on the test
+ function requesting this fixture.
+ E.g. "testdir.makefile('.txt', 'line1', 'line2')"
+
+ kwargs: Each keyword is the name of a file, while the value of
+ it will be written as contents of the file.
+ E.g. "testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')"
+
+ """
+ return self._makefile(ext, args, kwargs)
+
+ def makeconftest(self, source):
+ """Write a contest.py file with 'source' as contents."""
+ return self.makepyfile(conftest=source)
+
+ def makeini(self, source):
+ """Write a tox.ini file with 'source' as contents."""
+ return self.makefile('.ini', tox=source)
+
+ def getinicfg(self, source):
+ """Return the pytest section from the tox.ini config file."""
+ p = self.makeini(source)
+ return py.iniconfig.IniConfig(p)['pytest']
+
+ def makepyfile(self, *args, **kwargs):
+ """Shortcut for .makefile() with a .py extension."""
+ return self._makefile('.py', args, kwargs)
+
+ def maketxtfile(self, *args, **kwargs):
+ """Shortcut for .makefile() with a .txt extension."""
+ return self._makefile('.txt', args, kwargs)
+
+ def syspathinsert(self, path=None):
+ """Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`.
+
+ This is undone automatically after the test.
+ """
+ if path is None:
+ path = self.tmpdir
+ sys.path.insert(0, str(path))
+ # a call to syspathinsert() usually means that the caller
+ # wants to import some dynamically created files.
+ # with python3 we thus invalidate import caches.
+ self._possibly_invalidate_import_caches()
+
+ def _possibly_invalidate_import_caches(self):
+ # invalidate caches if we can (py33 and above)
+ try:
+ import importlib
+ except ImportError:
+ pass
+ else:
+ if hasattr(importlib, "invalidate_caches"):
+ importlib.invalidate_caches()
+
+ def mkdir(self, name):
+ """Create a new (sub)directory."""
+ return self.tmpdir.mkdir(name)
+
+ def mkpydir(self, name):
+ """Create a new python package.
+
+ This creates a (sub)directory with an empty ``__init__.py``
+ file so that it is recognised as a python package.
+
+ """
+ p = self.mkdir(name)
+ p.ensure("__init__.py")
+ return p
+
+ Session = Session
+ def getnode(self, config, arg):
+ """Return the collection node of a file.
+
+ :param config: :py:class:`_pytest.config.Config` instance, see
+ :py:meth:`parseconfig` and :py:meth:`parseconfigure` to
+ create the configuration.
+
+ :param arg: A :py:class:`py.path.local` instance of the file.
+
+ """
+ session = Session(config)
+ assert '::' not in str(arg)
+ p = py.path.local(arg)
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([str(p)], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
+ return res
+
+ def getpathnode(self, path):
+ """Return the collection node of a file.
+
+ This is like :py:meth:`getnode` but uses
+ :py:meth:`parseconfigure` to create the (configured) pytest
+ Config instance.
+
+ :param path: A :py:class:`py.path.local` instance of the file.
+
+ """
+ config = self.parseconfigure(path)
+ session = Session(config)
+ x = session.fspath.bestrelpath(path)
+ config.hook.pytest_sessionstart(session=session)
+ res = session.perform_collect([x], genitems=False)[0]
+ config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK)
+ return res
+
+ def genitems(self, colitems):
+ """Generate all test items from a collection node.
+
+ This recurses into the collection node and returns a list of
+ all the test items contained within.
+
+ """
+ session = colitems[0].session
+ result = []
+ for colitem in colitems:
+ result.extend(session.genitems(colitem))
+ return result
+
+ def runitem(self, source):
+ """Run the "test_func" Item.
+
+ The calling test instance (the class which contains the test
+ method) must provide a ``.getrunner()`` method which should
+ return a runner which can run the test protocol for a single
+ item, like e.g. :py:func:`_pytest.runner.runtestprotocol`.
+
+ """
+ # used from runner functional tests
+ item = self.getitem(source)
+ # the test class where we are called from wants to provide the runner
+ testclassinstance = self.request.instance
+ runner = testclassinstance.getrunner()
+ return runner(item)
+
+ def inline_runsource(self, source, *cmdlineargs):
+ """Run a test module in process using ``pytest.main()``.
+
+ This run writes "source" into a temporary file and runs
+ ``pytest.main()`` on it, returning a :py:class:`HookRecorder`
+ instance for the result.
+
+ :param source: The source code of the test module.
+
+ :param cmdlineargs: Any extra command line arguments to use.
+
+ :return: :py:class:`HookRecorder` instance of the result.
+
+ """
+ p = self.makepyfile(source)
+ l = list(cmdlineargs) + [p]
+ return self.inline_run(*l)
+
+ def inline_genitems(self, *args):
+ """Run ``pytest.main(['--collectonly'])`` in-process.
+
+ Returns a tuple of the collected items and a
+ :py:class:`HookRecorder` instance.
+
+ This runs the :py:func:`pytest.main` function to run all of
+ pytest inside the test process itself like
+ :py:meth:`inline_run`. However the return value is a tuple of
+ the collection items and a :py:class:`HookRecorder` instance.
+
+ """
+ rec = self.inline_run("--collect-only", *args)
+ items = [x.item for x in rec.getcalls("pytest_itemcollected")]
+ return items, rec
+
+ def inline_run(self, *args, **kwargs):
+ """Run ``pytest.main()`` in-process, returning a HookRecorder.
+
+ This runs the :py:func:`pytest.main` function to run all of
+ pytest inside the test process itself. This means it can
+ return a :py:class:`HookRecorder` instance which gives more
+ detailed results from the run than can be obtained by matching
+ stdout/stderr from :py:meth:`runpytest`.
+
+ :param args: Any command line arguments to pass to
+ :py:func:`pytest.main`.
+
+ :param plugin: (keyword-only) Extra plugin instances the
+ ``pytest.main()`` instance should use.
+
+ :return: A :py:class:`HookRecorder` instance.
+ """
+ # When running py.test inline, any plugins active in the main
+ # test process are already imported, so this disables the
+ # warning which would otherwise trigger to say they can no longer
+ # be rewritten, which is fine as they are already rewritten.
+ orig_warn = AssertionRewritingHook._warn_already_imported
+
+ def revert():
+ AssertionRewritingHook._warn_already_imported = orig_warn
+
+ self.request.addfinalizer(revert)
+ AssertionRewritingHook._warn_already_imported = lambda *a: None
+
+ rec = []
+
+ class Collect:
+ def pytest_configure(x, config):
+ rec.append(self.make_hook_recorder(config.pluginmanager))
+
+ plugins = kwargs.get("plugins") or []
+ plugins.append(Collect())
+ ret = pytest.main(list(args), plugins=plugins)
+ self.delete_loaded_modules()
+ if len(rec) == 1:
+ reprec = rec.pop()
+ else:
+ class reprec:
+ pass
+ reprec.ret = ret
+
+ # typically we reraise keyboard interrupts from the child run
+ # because it's our user requesting interruption of the testing
+ if ret == 2 and not kwargs.get("no_reraise_ctrlc"):
+ calls = reprec.getcalls("pytest_keyboard_interrupt")
+ if calls and calls[-1].excinfo.type == KeyboardInterrupt:
+ raise KeyboardInterrupt()
+ return reprec
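+
+ # Minimal sketch contrasting inline_run() with the subprocess-based
+ # helpers, assuming a test module already written via makepyfile; the
+ # ``-q`` flag is illustrative:
+ #
+ #     reprec = testdir.inline_run("-q")
+ #     reprec.assertoutcome(passed=1)   # HookRecorder-based assertions
+ #
+ # runpytest()/runpytest_subprocess() instead return a RunResult and only
+ # expose captured stdout/stderr for matching.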
+
+ def runpytest_inprocess(self, *args, **kwargs):
+ """ Return result of running pytest in-process, providing a similar
+ interface to what self.runpytest() provides. """
+ if kwargs.get("syspathinsert"):
+ self.syspathinsert()
+ now = time.time()
+ capture = py.io.StdCapture()
+ try:
+ try:
+ reprec = self.inline_run(*args, **kwargs)
+ except SystemExit as e:
+
+ class reprec:
+ ret = e.args[0]
+
+ except Exception:
+ traceback.print_exc()
+
+ class reprec:
+ ret = 3
+ finally:
+ out, err = capture.reset()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+ res = RunResult(reprec.ret,
+ out.split("\n"), err.split("\n"),
+ time.time()-now)
+ res.reprec = reprec
+ return res
+
+ def runpytest(self, *args, **kwargs):
+ """ Run pytest inline or in a subprocess, depending on the command line
+ option "--runpytest" and return a :py:class:`RunResult`.
+
+ """
+ args = self._ensure_basetemp(args)
+ return self._runpytest_method(*args, **kwargs)
+
+ def _ensure_basetemp(self, args):
+ args = [str(x) for x in args]
+ for x in args:
+ if str(x).startswith('--basetemp'):
+ #print ("basedtemp exists: %s" %(args,))
+ break
+ else:
+ args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp'))
+ #print ("added basetemp: %s" %(args,))
+ return args
+
+ def parseconfig(self, *args):
+ """Return a new pytest Config instance from given commandline args.
+
+ This invokes the pytest bootstrapping code in _pytest.config
+ to create a new :py:class:`_pytest.core.PluginManager` and
+ call the pytest_cmdline_parse hook to create a new
+ :py:class:`_pytest.config.Config` instance.
+
+ If :py:attr:`plugins` has been populated they should be plugin
+ modules which will be registered with the PluginManager.
+
+ """
+ args = self._ensure_basetemp(args)
+
+ import _pytest.config
+ config = _pytest.config._prepareconfig(args, self.plugins)
+ # we don't know what the test will do with this half-setup config
+ # object and thus we make sure it gets unconfigured properly in any
+ # case (otherwise capturing could still be active, for example)
+ self.request.addfinalizer(config._ensure_unconfigure)
+ return config
+
+ def parseconfigure(self, *args):
+ """Return a new pytest configured Config instance.
+
+ This returns a new :py:class:`_pytest.config.Config` instance
+ like :py:meth:`parseconfig`, but also calls the
+ pytest_configure hook.
+
+ """
+ config = self.parseconfig(*args)
+ config._do_configure()
+ self.request.addfinalizer(config._ensure_unconfigure)
+ return config
+
+ def getitem(self, source, funcname="test_func"):
+ """Return the test item for a test function.
+
+ This writes the source to a python file and runs pytest's
+ collection on the resulting module, returning the test item
+ for the requested function name.
+
+ :param source: The module source.
+
+ :param funcname: The name of the test function for which the
+ Item must be returned.
+
+ """
+ items = self.getitems(source)
+ for item in items:
+ if item.name == funcname:
+ return item
+ assert 0, "%r item not found in module:\n%s\nitems: %s" %(
+ funcname, source, items)
+
+ def getitems(self, source):
+ """Return all test items collected from the module.
+
+ This writes the source to a python file and runs pytest's
+ collection on the resulting module, returning all test items
+ contained within.
+
+ """
+ modcol = self.getmodulecol(source)
+ return self.genitems([modcol])
+
+ def getmodulecol(self, source, configargs=(), withinit=False):
+ """Return the module collection node for ``source``.
+
+ This writes ``source`` to a file using :py:meth:`makepyfile`
+ and then runs the pytest collection on it, returning the
+ collection node for the test module.
+
+ :param source: The source code of the module to collect.
+
+ :param configargs: Any extra arguments to pass to
+ :py:meth:`parseconfigure`.
+
+ :param withinit: Whether to also write an ``__init__.py`` file
+ to the temporary directory to ensure it is a package.
+
+ """
+ kw = {self.request.function.__name__: Source(source).strip()}
+ path = self.makepyfile(**kw)
+ if withinit:
+ self.makepyfile(__init__ = "#")
+ self.config = config = self.parseconfigure(path, *configargs)
+ node = self.getnode(config, path)
+ return node
+
+ def collect_by_name(self, modcol, name):
+ """Return the collection node for name from the module collection.
+
+ This will search a module collection node for a collection
+ node matching the given name.
+
+ :param modcol: A module collection node, see
+ :py:meth:`getmodulecol`.
+
+ :param name: The name of the node to return.
+
+ """
+ for colitem in modcol._memocollect():
+ if colitem.name == name:
+ return colitem
+
+ def popen(self, cmdargs, stdout, stderr, **kw):
+ """Invoke subprocess.Popen.
+
+ This calls subprocess.Popen, making sure the current working
+ directory is in the PYTHONPATH.
+
+ You probably want to use :py:meth:`run` instead.
+
+ """
+ env = os.environ.copy()
+ env['PYTHONPATH'] = os.pathsep.join(filter(None, [
+ str(os.getcwd()), env.get('PYTHONPATH', '')]))
+ kw['env'] = env
+ return subprocess.Popen(cmdargs,
+ stdout=stdout, stderr=stderr, **kw)
+
+ def run(self, *cmdargs):
+ """Run a command with arguments.
+
+ Run a process using subprocess.Popen saving the stdout and
+ stderr.
+
+ Returns a :py:class:`RunResult`.
+
+ """
+ return self._run(*cmdargs)
+
+ def _run(self, *cmdargs):
+ cmdargs = [str(x) for x in cmdargs]
+ p1 = self.tmpdir.join("stdout")
+ p2 = self.tmpdir.join("stderr")
+ print_("running:", ' '.join(cmdargs))
+ print_(" in:", str(py.path.local()))
+ f1 = codecs.open(str(p1), "w", encoding="utf8")
+ f2 = codecs.open(str(p2), "w", encoding="utf8")
+ try:
+ now = time.time()
+ popen = self.popen(cmdargs, stdout=f1, stderr=f2,
+ close_fds=(sys.platform != "win32"))
+ ret = popen.wait()
+ finally:
+ f1.close()
+ f2.close()
+ f1 = codecs.open(str(p1), "r", encoding="utf8")
+ f2 = codecs.open(str(p2), "r", encoding="utf8")
+ try:
+ out = f1.read().splitlines()
+ err = f2.read().splitlines()
+ finally:
+ f1.close()
+ f2.close()
+ self._dump_lines(out, sys.stdout)
+ self._dump_lines(err, sys.stderr)
+ return RunResult(ret, out, err, time.time()-now)
+
+ def _dump_lines(self, lines, fp):
+ try:
+ for line in lines:
+ py.builtin.print_(line, file=fp)
+ except UnicodeEncodeError:
+ print("couldn't print to %s because of encoding" % (fp,))
+
+ def _getpytestargs(self):
+ # we cannot use "(sys.executable,script)"
+ # because on windows the script is e.g. a pytest.exe
+ return (sys.executable, _pytest_fullpath,) # noqa
+
+ def runpython(self, script):
+ """Run a python script using sys.executable as interpreter.
+
+ Returns a :py:class:`RunResult`.
+ """
+ return self.run(sys.executable, script)
+
+ def runpython_c(self, command):
+ """Run python -c "command", return a :py:class:`RunResult`."""
+ return self.run(sys.executable, "-c", command)
+
+ def runpytest_subprocess(self, *args, **kwargs):
+ """Run pytest as a subprocess with given arguments.
+
+ Any plugins added to the :py:attr:`plugins` list will be added
+ using the ``-p`` command line option. Additionally
+ ``--basetemp`` is used to put any temporary files and directories
+ in a numbered directory prefixed with "runpytest-" so they do
+ not conflict with the normal numbered pytest location for
+ temporary files and directories.
+
+ Returns a :py:class:`RunResult`.
+
+ """
+ p = py.path.local.make_numbered_dir(prefix="runpytest-",
+ keep=None, rootdir=self.tmpdir)
+ args = ('--basetemp=%s' % p, ) + args
+ #for x in args:
+ # if '--confcutdir' in str(x):
+ # break
+ #else:
+ # pass
+ # args = ('--confcutdir=.',) + args
+ plugins = [x for x in self.plugins if isinstance(x, str)]
+ if plugins:
+ args = ('-p', plugins[0]) + args
+ args = self._getpytestargs() + args
+ return self.run(*args)
+
+ def spawn_pytest(self, string, expect_timeout=10.0):
+ """Run pytest using pexpect.
+
+ This makes sure to use the right pytest and sets up the
+ temporary directory locations.
+
+ The pexpect child is returned.
+
+ """
+ basetemp = self.tmpdir.mkdir("pexpect")
+ invoke = " ".join(map(str, self._getpytestargs()))
+ cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string)
+ return self.spawn(cmd, expect_timeout=expect_timeout)
+
+ def spawn(self, cmd, expect_timeout=10.0):
+ """Run a command using pexpect.
+
+ The pexpect child is returned.
+ """
+ pexpect = pytest.importorskip("pexpect", "3.0")
+ if hasattr(sys, 'pypy_version_info') and '64' in platform.machine():
+ pytest.skip("pypy-64 bit not supported")
+ if sys.platform.startswith("freebsd"):
+ pytest.xfail("pexpect does not work reliably on freebsd")
+ logfile = self.tmpdir.join("spawn.out").open("wb")
+ child = pexpect.spawn(cmd, logfile=logfile)
+ self.request.addfinalizer(logfile.close)
+ child.timeout = expect_timeout
+ return child
+
+def getdecoded(out):
+ try:
+ return out.decode("utf-8")
+ except UnicodeDecodeError:
+ return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % (
+ py.io.saferepr(out),)
+
+
+class LineComp:
+ def __init__(self):
+ self.stringio = py.io.TextIO()
+
+ def assert_contains_lines(self, lines2):
+ """ assert that lines2 are contained (linearly) in lines1.
+ return a list of extralines found.
+ """
+ __tracebackhide__ = True
+ val = self.stringio.getvalue()
+ self.stringio.truncate(0)
+ self.stringio.seek(0)
+ lines1 = val.split("\n")
+ return LineMatcher(lines1).fnmatch_lines(lines2)
+
+
+class LineMatcher:
+ """Flexible matching of text.
+
+ This is a convenience class to test large texts like the output of
+ commands.
+
+ The constructor takes a list of lines without their trailing
+ newlines, i.e. ``text.splitlines()``.
+
+ """
+
+ def __init__(self, lines):
+ self.lines = lines
+ self._log_output = []
+
+ def str(self):
+ """Return the entire original text."""
+ return "\n".join(self.lines)
+
+ def _getlines(self, lines2):
+ if isinstance(lines2, str):
+ lines2 = Source(lines2)
+ if isinstance(lines2, Source):
+ lines2 = lines2.strip().lines
+ return lines2
+
+ def fnmatch_lines_random(self, lines2):
+ """Check lines exist in the output.
+
+ The argument is a list of lines which have to occur in the
+ output, in any order. Each line can contain glob wildcards.
+
+ """
+ lines2 = self._getlines(lines2)
+ for line in lines2:
+ for x in self.lines:
+ if line == x or fnmatch(x, line):
+ self._log("matched: ", repr(line))
+ break
+ else:
+ self._log("line %r not found in output" % line)
+ raise ValueError(self._log_text)
+
+ def get_lines_after(self, fnline):
+ """Return all lines following the given line in the text.
+
+ The given line can contain glob wildcards.
+ """
+ for i, line in enumerate(self.lines):
+ if fnline == line or fnmatch(line, fnline):
+ return self.lines[i+1:]
+ raise ValueError("line %r not found in output" % fnline)
+
+ def _log(self, *args):
+ self._log_output.append(' '.join((str(x) for x in args)))
+
+ @property
+ def _log_text(self):
+ return '\n'.join(self._log_output)
+
+ def fnmatch_lines(self, lines2):
+ """Search the text for matching lines.
+
+ The argument is a list of lines which have to match and can
+ use glob wildcards. If they do not match, pytest.fail() is
+ called. The matches and non-matches are also printed on
+ stdout.
+
+ """
+ lines2 = self._getlines(lines2)
+ lines1 = self.lines[:]
+ nextline = None
+ extralines = []
+ __tracebackhide__ = True
+ for line in lines2:
+ nomatchprinted = False
+ while lines1:
+ nextline = lines1.pop(0)
+ if line == nextline:
+ self._log("exact match:", repr(line))
+ break
+ elif fnmatch(nextline, line):
+ self._log("fnmatch:", repr(line))
+ self._log(" with:", repr(nextline))
+ break
+ else:
+ if not nomatchprinted:
+ self._log("nomatch:", repr(line))
+ nomatchprinted = True
+ self._log(" and:", repr(nextline))
+ extralines.append(nextline)
+ else:
+ self._log("remains unmatched: %r" % (line,))
+ pytest.fail(self._log_text)
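+
+# Minimal usage sketch for LineMatcher, assuming a RunResult ``result``
+# obtained from Testdir.runpytest(); the glob pattern is illustrative:
+#
+#     result.stdout.fnmatch_lines([
+#         "*1 passed*",
+#     ])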
diff --git a/lib/spack/external/_pytest/python.py b/lib/spack/external/_pytest/python.py
new file mode 100644
index 0000000000..53815da2f0
--- /dev/null
+++ b/lib/spack/external/_pytest/python.py
@@ -0,0 +1,1578 @@
+""" Python test discovery, setup and run of test functions. """
+
+import fnmatch
+import inspect
+import sys
+import collections
+import math
+from itertools import count
+
+import py
+import pytest
+from _pytest.mark import MarkerError
+
+
+import _pytest
+import _pytest._pluggy as pluggy
+from _pytest import fixtures
+from _pytest.compat import (
+ isclass, isfunction, is_generator, _escape_strings,
+ REGEX_TYPE, STRING_TYPES, NoneType, NOTSET,
+ get_real_func, getfslineno, safe_getattr,
+ getlocation, enum,
+)
+
+cutdir1 = py.path.local(pluggy.__file__.rstrip("oc"))
+cutdir2 = py.path.local(_pytest.__file__).dirpath()
+cutdir3 = py.path.local(py.__file__).dirpath()
+
+
+def filter_traceback(entry):
+ """Return True if a TracebackEntry instance should be removed from tracebacks:
+ * dynamically generated code (no code to show up for it);
+ * internal traceback from pytest or its internal libraries, py and pluggy.
+ """
+ # entry.path might sometimes return a str object when the entry
+ # points to dynamically generated code
+ # see https://bitbucket.org/pytest-dev/py/issues/71
+ raw_filename = entry.frame.code.raw.co_filename
+ is_generated = '<' in raw_filename and '>' in raw_filename
+ if is_generated:
+ return False
+ # entry.path might point to a non-existing file, in which case it will
+ # also return a str object. see #1133
+ p = py.path.local(entry.path)
+ return p != cutdir1 and not p.relto(cutdir2) and not p.relto(cutdir3)
+
+
+
+def pyobj_property(name):
+ def get(self):
+ node = self.getparent(getattr(pytest, name))
+ if node is not None:
+ return node.obj
+ doc = "python %s object this node was collected from (can be None)." % (
+ name.lower(),)
+ return property(get, None, None, doc)
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption('--fixtures', '--funcargs',
+ action="store_true", dest="showfixtures", default=False,
+ help="show available fixtures, sorted by plugin appearance")
+ group.addoption(
+ '--fixtures-per-test',
+ action="store_true",
+ dest="show_fixtures_per_test",
+ default=False,
+ help="show fixtures per test",
+ )
+ parser.addini("usefixtures", type="args", default=[],
+ help="list of default fixtures to be used with this project")
+ parser.addini("python_files", type="args",
+ default=['test_*.py', '*_test.py'],
+ help="glob-style file patterns for Python test module discovery")
+ parser.addini("python_classes", type="args", default=["Test",],
+ help="prefixes or glob names for Python test class discovery")
+ parser.addini("python_functions", type="args", default=["test",],
+ help="prefixes or glob names for Python test function and "
+ "method discovery")
+
+ group.addoption("--import-mode", default="prepend",
+ choices=["prepend", "append"], dest="importmode",
+ help="prepend/append to sys.path when importing test modules, "
+ "default is to prepend.")
+
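+# Sketch of how the ini options registered above are typically overridden in
+# a project's pytest.ini or tox.ini (values are illustrative):
+#
+#     [pytest]
+#     python_files = check_*.py
+#     python_classes = Check
+#     python_functions = check_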
+
+def pytest_cmdline_main(config):
+ if config.option.showfixtures:
+ showfixtures(config)
+ return 0
+ if config.option.show_fixtures_per_test:
+ show_fixtures_per_test(config)
+ return 0
+
+
+def pytest_generate_tests(metafunc):
+ # those alternative spellings are common - raise a specific error to alert
+ # the user
+ alt_spellings = ['parameterize', 'parametrise', 'parameterise']
+ for attr in alt_spellings:
+ if hasattr(metafunc.function, attr):
+ msg = "{0} has '{1}', spelling should be 'parametrize'"
+ raise MarkerError(msg.format(metafunc.function.__name__, attr))
+ try:
+ markers = metafunc.function.parametrize
+ except AttributeError:
+ return
+ for marker in markers:
+ metafunc.parametrize(*marker.args, **marker.kwargs)
+
+def pytest_configure(config):
+ config.addinivalue_line("markers",
+ "parametrize(argnames, argvalues): call a test function multiple "
+ "times passing in different arguments in turn. argvalues generally "
+ "needs to be a list of values if argnames specifies only one name "
+ "or a list of tuples of values if argnames specifies multiple names. "
+ "Example: @parametrize('arg1', [1,2]) would lead to two calls of the "
+ "decorated test function, one with arg1=1 and another with arg1=2."
+ "see http://pytest.org/latest/parametrize.html for more info and "
+ "examples."
+ )
+ config.addinivalue_line("markers",
+ "usefixtures(fixturename1, fixturename2, ...): mark tests as needing "
+ "all of the specified fixtures. see http://pytest.org/latest/fixture.html#usefixtures "
+ )
+
+@pytest.hookimpl(trylast=True)
+def pytest_namespace():
+ raises.Exception = pytest.fail.Exception
+ return {
+ 'raises': raises,
+ 'approx': approx,
+ 'collect': {
+ 'Module': Module,
+ 'Class': Class,
+ 'Instance': Instance,
+ 'Function': Function,
+ 'Generator': Generator,
+ }
+ }
+
+
+@pytest.hookimpl(trylast=True)
+def pytest_pyfunc_call(pyfuncitem):
+ testfunction = pyfuncitem.obj
+ if pyfuncitem._isyieldedfunction():
+ testfunction(*pyfuncitem._args)
+ else:
+ funcargs = pyfuncitem.funcargs
+ testargs = {}
+ for arg in pyfuncitem._fixtureinfo.argnames:
+ testargs[arg] = funcargs[arg]
+ testfunction(**testargs)
+ return True
+
+def pytest_collect_file(path, parent):
+ ext = path.ext
+ if ext == ".py":
+ if not parent.session.isinitpath(path):
+ for pat in parent.config.getini('python_files'):
+ if path.fnmatch(pat):
+ break
+ else:
+ return
+ ihook = parent.session.gethookproxy(path)
+ return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
+
+def pytest_pycollect_makemodule(path, parent):
+ return Module(path, parent)
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+ outcome = yield
+ res = outcome.get_result()
+ if res is not None:
+ raise StopIteration
+ # nothing was collected elsewhere, let's do it here
+ if isclass(obj):
+ if collector.istestclass(obj, name):
+ Class = collector._getcustomclass("Class")
+ outcome.force_result(Class(name, parent=collector))
+ elif collector.istestfunction(obj, name):
+ # mock seems to store unbound methods (issue473), normalize it
+ obj = getattr(obj, "__func__", obj)
+ # We need to try and unwrap the function if it's a functools.partial
+ # or a functools.wraps-wrapped function.
+ # We mustn't if it's been wrapped with mock.patch (python 2 only).
+ if not (isfunction(obj) or isfunction(get_real_func(obj))):
+ collector.warn(code="C2", message=
+ "cannot collect %r because it is not a function."
+ % name, )
+ elif getattr(obj, "__test__", True):
+ if is_generator(obj):
+ res = Generator(name, parent=collector)
+ else:
+ res = list(collector._genfunctions(name, obj))
+ outcome.force_result(res)
+
+def pytest_make_parametrize_id(config, val):
+ return None
+
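+# Sketch of a conftest.py implementation of the hook above; the handling of
+# dict values is illustrative:
+#
+#     def pytest_make_parametrize_id(config, val):
+#         if isinstance(val, dict):
+#             return "-".join("%s=%s" % item for item in sorted(val.items()))
+#         return None   # fall back to the default id generation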
+
+
+class PyobjContext(object):
+ module = pyobj_property("Module")
+ cls = pyobj_property("Class")
+ instance = pyobj_property("Instance")
+
+class PyobjMixin(PyobjContext):
+ def obj():
+ def fget(self):
+ obj = getattr(self, '_obj', None)
+ if obj is None:
+ self._obj = obj = self._getobj()
+ return obj
+
+ def fset(self, value):
+ self._obj = value
+
+ return property(fget, fset, None, "underlying python object")
+
+ obj = obj()
+
+ def _getobj(self):
+ return getattr(self.parent.obj, self.name)
+
+ def getmodpath(self, stopatmodule=True, includemodule=False):
+ """ return python path relative to the containing module. """
+ chain = self.listchain()
+ chain.reverse()
+ parts = []
+ for node in chain:
+ if isinstance(node, Instance):
+ continue
+ name = node.name
+ if isinstance(node, Module):
+ assert name.endswith(".py")
+ name = name[:-3]
+ if stopatmodule:
+ if includemodule:
+ parts.append(name)
+ break
+ parts.append(name)
+ parts.reverse()
+ s = ".".join(parts)
+ return s.replace(".[", "[")
+
+ def _getfslineno(self):
+ return getfslineno(self.obj)
+
+ def reportinfo(self):
+ # XXX caching?
+ obj = self.obj
+ compat_co_firstlineno = getattr(obj, 'compat_co_firstlineno', None)
+ if isinstance(compat_co_firstlineno, int):
+ # nose compatibility
+ fspath = sys.modules[obj.__module__].__file__
+ if fspath.endswith(".pyc"):
+ fspath = fspath[:-1]
+ lineno = compat_co_firstlineno
+ else:
+ fspath, lineno = getfslineno(obj)
+ modpath = self.getmodpath()
+ assert isinstance(lineno, int)
+ return fspath, lineno, modpath
+
+class PyCollector(PyobjMixin, pytest.Collector):
+
+ def funcnamefilter(self, name):
+ return self._matches_prefix_or_glob_option('python_functions', name)
+
+ def isnosetest(self, obj):
+ """ Look for the __test__ attribute, which is applied by the
+ @nose.tools.istest decorator
+ """
+ # We explicitly check for "is True" here to not mistakenly treat
+ # classes with a custom __getattr__ returning something truthy (like a
+ # function) as test classes.
+ return safe_getattr(obj, '__test__', False) is True
+
+ def classnamefilter(self, name):
+ return self._matches_prefix_or_glob_option('python_classes', name)
+
+ def istestfunction(self, obj, name):
+ return (
+ (self.funcnamefilter(name) or self.isnosetest(obj)) and
+ safe_getattr(obj, "__call__", False) and fixtures.getfixturemarker(obj) is None
+ )
+
+ def istestclass(self, obj, name):
+ return self.classnamefilter(name) or self.isnosetest(obj)
+
+ def _matches_prefix_or_glob_option(self, option_name, name):
+ """
+ checks if the given name matches the prefix or glob-pattern defined
+ in ini configuration.
+ """
+ for option in self.config.getini(option_name):
+ if name.startswith(option):
+ return True
+ # check that name looks like a glob-string before calling fnmatch
+ # because this is called for every name in each collected module,
+ # and fnmatch is somewhat expensive to call
+ elif ('*' in option or '?' in option or '[' in option) and \
+ fnmatch.fnmatch(name, option):
+ return True
+ return False
+
+ def collect(self):
+ if not getattr(self.obj, "__test__", True):
+ return []
+
+ # NB. we avoid random getattrs and peek in the __dict__ instead
+ # (XXX originally introduced from a PyPy need, still true?)
+ dicts = [getattr(self.obj, '__dict__', {})]
+ for basecls in inspect.getmro(self.obj.__class__):
+ dicts.append(basecls.__dict__)
+ seen = {}
+ l = []
+ for dic in dicts:
+ for name, obj in list(dic.items()):
+ if name in seen:
+ continue
+ seen[name] = True
+ res = self.makeitem(name, obj)
+ if res is None:
+ continue
+ if not isinstance(res, list):
+ res = [res]
+ l.extend(res)
+ l.sort(key=lambda item: item.reportinfo()[:2])
+ return l
+
+ def makeitem(self, name, obj):
+ #assert self.ihook.fspath == self.fspath, self
+ return self.ihook.pytest_pycollect_makeitem(
+ collector=self, name=name, obj=obj)
+
+ def _genfunctions(self, name, funcobj):
+ module = self.getparent(Module).obj
+ clscol = self.getparent(Class)
+ cls = clscol and clscol.obj or None
+ transfer_markers(funcobj, cls, module)
+ fm = self.session._fixturemanager
+ fixtureinfo = fm.getfixtureinfo(self, funcobj, cls)
+ metafunc = Metafunc(funcobj, fixtureinfo, self.config,
+ cls=cls, module=module)
+ methods = []
+ if hasattr(module, "pytest_generate_tests"):
+ methods.append(module.pytest_generate_tests)
+ if hasattr(cls, "pytest_generate_tests"):
+ methods.append(cls().pytest_generate_tests)
+ if methods:
+ self.ihook.pytest_generate_tests.call_extra(methods,
+ dict(metafunc=metafunc))
+ else:
+ self.ihook.pytest_generate_tests(metafunc=metafunc)
+
+ Function = self._getcustomclass("Function")
+ if not metafunc._calls:
+ yield Function(name, parent=self, fixtureinfo=fixtureinfo)
+ else:
+ # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs
+ fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm)
+
+ for callspec in metafunc._calls:
+ subname = "%s[%s]" % (name, callspec.id)
+ yield Function(name=subname, parent=self,
+ callspec=callspec, callobj=funcobj,
+ fixtureinfo=fixtureinfo,
+ keywords={callspec.id:True},
+ originalname=name,
+ )
+
+
+def _marked(func, mark):
+ """ Returns True if :func: is already marked with :mark:, False otherwise.
+ This can happen if marker is applied to class and the test file is
+ invoked more than once.
+ """
+ try:
+ func_mark = getattr(func, mark.name)
+ except AttributeError:
+ return False
+ return mark.args == func_mark.args and mark.kwargs == func_mark.kwargs
+
+
+def transfer_markers(funcobj, cls, mod):
+ # XXX this should rather be code in the mark plugin or the mark
+ # plugin should merge with the python plugin.
+ for holder in (cls, mod):
+ try:
+ pytestmark = holder.pytestmark
+ except AttributeError:
+ continue
+ if isinstance(pytestmark, list):
+ for mark in pytestmark:
+ if not _marked(funcobj, mark):
+ mark(funcobj)
+ else:
+ if not _marked(funcobj, pytestmark):
+ pytestmark(funcobj)
+
+class Module(pytest.File, PyCollector):
+ """ Collector for test classes and functions. """
+ def _getobj(self):
+ return self._memoizedcall('_obj', self._importtestmodule)
+
+ def collect(self):
+ self.session._fixturemanager.parsefactories(self)
+ return super(Module, self).collect()
+
+ def _importtestmodule(self):
+ # we assume we are only called once per module
+ importmode = self.config.getoption("--import-mode")
+ try:
+ mod = self.fspath.pyimport(ensuresyspath=importmode)
+ except SyntaxError:
+ raise self.CollectError(
+ _pytest._code.ExceptionInfo().getrepr(style="short"))
+ except self.fspath.ImportMismatchError:
+ e = sys.exc_info()[1]
+ raise self.CollectError(
+ "import file mismatch:\n"
+ "imported module %r has this __file__ attribute:\n"
+ " %s\n"
+ "which is not the same as the test file we want to collect:\n"
+ " %s\n"
+ "HINT: remove __pycache__ / .pyc files and/or use a "
+ "unique basename for your test file modules"
+ % e.args
+ )
+ except ImportError:
+ from _pytest._code.code import ExceptionInfo
+ exc_info = ExceptionInfo()
+ if self.config.getoption('verbose') < 2:
+ exc_info.traceback = exc_info.traceback.filter(filter_traceback)
+ exc_repr = exc_info.getrepr(style='short') if exc_info.traceback else exc_info.exconly()
+ formatted_tb = py._builtin._totext(exc_repr)
+ raise self.CollectError(
+ "ImportError while importing test module '{fspath}'.\n"
+ "Hint: make sure your test modules/packages have valid Python names.\n"
+ "Traceback:\n"
+ "{traceback}".format(fspath=self.fspath, traceback=formatted_tb)
+ )
+ except _pytest.runner.Skipped as e:
+ if e.allow_module_level:
+ raise
+ raise self.CollectError(
+ "Using pytest.skip outside of a test is not allowed. If you are "
+ "trying to decorate a test function, use the @pytest.mark.skip "
+ "or @pytest.mark.skipif decorators instead."
+ )
+ self.config.pluginmanager.consider_module(mod)
+ return mod
+
+ def setup(self):
+ setup_module = _get_xunit_setup_teardown(self.obj, "setUpModule")
+ if setup_module is None:
+ setup_module = _get_xunit_setup_teardown(self.obj, "setup_module")
+ if setup_module is not None:
+ setup_module()
+
+ teardown_module = _get_xunit_setup_teardown(self.obj, 'tearDownModule')
+ if teardown_module is None:
+ teardown_module = _get_xunit_setup_teardown(self.obj, 'teardown_module')
+ if teardown_module is not None:
+ self.addfinalizer(teardown_module)
+
+
+def _get_xunit_setup_teardown(holder, attr_name, param_obj=None):
+ """
+ Return a callable to perform xunit-style setup or teardown if
+ the function exists in the ``holder`` object.
+ The ``param_obj`` parameter is the parameter which will be passed to the function
+ when the callable is called without arguments, defaults to the ``holder`` object.
+ Return ``None`` if a suitable callable is not found.
+ """
+ param_obj = param_obj if param_obj is not None else holder
+ result = _get_xunit_func(holder, attr_name)
+ if result is not None:
+ arg_count = result.__code__.co_argcount
+ if inspect.ismethod(result):
+ arg_count -= 1
+ if arg_count:
+ return lambda: result(param_obj)
+ else:
+ return result
+
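+# Sketch of the xunit-style hooks resolved above, as they would appear in a
+# user's test module (the resource helpers are illustrative):
+#
+#     def setup_module(module):
+#         module.conn = create_connection()
+#
+#     def teardown_module(module):
+#         module.conn.close()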
+
+def _get_xunit_func(obj, name):
+ """Return the attribute from the given object to be used as a setup/teardown
+ xunit-style function, but only if not marked as a fixture to
+ avoid calling it twice.
+ """
+ meth = getattr(obj, name, None)
+ if fixtures.getfixturemarker(meth) is None:
+ return meth
+
+
+class Class(PyCollector):
+ """ Collector for test methods. """
+ def collect(self):
+ if hasinit(self.obj):
+ self.warn("C1", "cannot collect test class %r because it has a "
+ "__init__ constructor" % self.obj.__name__)
+ return []
+ elif hasnew(self.obj):
+ self.warn("C1", "cannot collect test class %r because it has a "
+ "__new__ constructor" % self.obj.__name__)
+ return []
+ return [self._getcustomclass("Instance")(name="()", parent=self)]
+
+ def setup(self):
+ setup_class = _get_xunit_func(self.obj, 'setup_class')
+ if setup_class is not None:
+ setup_class = getattr(setup_class, 'im_func', setup_class)
+ setup_class = getattr(setup_class, '__func__', setup_class)
+ setup_class(self.obj)
+
+ fin_class = getattr(self.obj, 'teardown_class', None)
+ if fin_class is not None:
+ fin_class = getattr(fin_class, 'im_func', fin_class)
+ fin_class = getattr(fin_class, '__func__', fin_class)
+ self.addfinalizer(lambda: fin_class(self.obj))
+
+class Instance(PyCollector):
+ def _getobj(self):
+ return self.parent.obj()
+
+ def collect(self):
+ self.session._fixturemanager.parsefactories(self)
+ return super(Instance, self).collect()
+
+ def newinstance(self):
+ self.obj = self._getobj()
+ return self.obj
+
+class FunctionMixin(PyobjMixin):
+ """ mixin for the code common to Function and Generator.
+ """
+
+ def setup(self):
+ """ perform setup for this test function. """
+ if hasattr(self, '_preservedparent'):
+ obj = self._preservedparent
+ elif isinstance(self.parent, Instance):
+ obj = self.parent.newinstance()
+ self.obj = self._getobj()
+ else:
+ obj = self.parent.obj
+ if inspect.ismethod(self.obj):
+ setup_name = 'setup_method'
+ teardown_name = 'teardown_method'
+ else:
+ setup_name = 'setup_function'
+ teardown_name = 'teardown_function'
+ setup_func_or_method = _get_xunit_setup_teardown(obj, setup_name, param_obj=self.obj)
+ if setup_func_or_method is not None:
+ setup_func_or_method()
+ teardown_func_or_method = _get_xunit_setup_teardown(obj, teardown_name, param_obj=self.obj)
+ if teardown_func_or_method is not None:
+ self.addfinalizer(teardown_func_or_method)
+
+ def _prunetraceback(self, excinfo):
+ if hasattr(self, '_obj') and not self.config.option.fulltrace:
+ code = _pytest._code.Code(get_real_func(self.obj))
+ path, firstlineno = code.path, code.firstlineno
+ traceback = excinfo.traceback
+ ntraceback = traceback.cut(path=path, firstlineno=firstlineno)
+ if ntraceback == traceback:
+ ntraceback = ntraceback.cut(path=path)
+ if ntraceback == traceback:
+ #ntraceback = ntraceback.cut(excludepath=cutdir2)
+ ntraceback = ntraceback.filter(filter_traceback)
+ if not ntraceback:
+ ntraceback = traceback
+
+ excinfo.traceback = ntraceback.filter()
+ # issue364: mark all but first and last frames to
+ # only show a single-line message for each frame
+ if self.config.option.tbstyle == "auto":
+ if len(excinfo.traceback) > 2:
+ for entry in excinfo.traceback[1:-1]:
+ entry.set_repr_style('short')
+
+ def _repr_failure_py(self, excinfo, style="long"):
+ if excinfo.errisinstance(pytest.fail.Exception):
+ if not excinfo.value.pytrace:
+ return py._builtin._totext(excinfo.value)
+ return super(FunctionMixin, self)._repr_failure_py(excinfo,
+ style=style)
+
+ def repr_failure(self, excinfo, outerr=None):
+ assert outerr is None, "XXX outerr usage is deprecated"
+ style = self.config.option.tbstyle
+ if style == "auto":
+ style = "long"
+ return self._repr_failure_py(excinfo, style=style)
+
+
+class Generator(FunctionMixin, PyCollector):
+ def collect(self):
+ # test generators are seen as collectors but they also
+ # invoke setup/teardown on popular request
+ # (induced by the common "test_*" naming shared with normal tests)
+ from _pytest import deprecated
+ self.session._setupstate.prepare(self)
+ # see FunctionMixin.setup and test_setupstate_is_preserved_134
+ self._preservedparent = self.parent.obj
+ l = []
+ seen = {}
+ for i, x in enumerate(self.obj()):
+ name, call, args = self.getcallargs(x)
+ if not callable(call):
+ raise TypeError("%r yielded non callable test %r" %(self.obj, call,))
+ if name is None:
+ name = "[%d]" % i
+ else:
+ name = "['%s']" % name
+ if name in seen:
+ raise ValueError("%r generated tests with non-unique name %r" %(self, name))
+ seen[name] = True
+ l.append(self.Function(name, self, args=args, callobj=call))
+ self.config.warn('C1', deprecated.YIELD_TESTS, fslocation=self.fspath)
+ return l
+
+ def getcallargs(self, obj):
+ if not isinstance(obj, (tuple, list)):
+ obj = (obj,)
+ # explicit naming
+ if isinstance(obj[0], py.builtin._basestring):
+ name = obj[0]
+ obj = obj[1:]
+ else:
+ name = None
+ call, args = obj[0], obj[1:]
+ return name, call, args
+
+
+def hasinit(obj):
+ init = getattr(obj, '__init__', None)
+ if init:
+ return init != object.__init__
+
+
+def hasnew(obj):
+ new = getattr(obj, '__new__', None)
+ if new:
+ return new != object.__new__
+
+
+class CallSpec2(object):
+ def __init__(self, metafunc):
+ self.metafunc = metafunc
+ self.funcargs = {}
+ self._idlist = []
+ self.params = {}
+ self._globalid = NOTSET
+ self._globalid_args = set()
+ self._globalparam = NOTSET
+ self._arg2scopenum = {} # used for sorting parametrized resources
+ self.keywords = {}
+ self.indices = {}
+
+ def copy(self, metafunc):
+ cs = CallSpec2(self.metafunc)
+ cs.funcargs.update(self.funcargs)
+ cs.params.update(self.params)
+ cs.keywords.update(self.keywords)
+ cs.indices.update(self.indices)
+ cs._arg2scopenum.update(self._arg2scopenum)
+ cs._idlist = list(self._idlist)
+ cs._globalid = self._globalid
+ cs._globalid_args = self._globalid_args
+ cs._globalparam = self._globalparam
+ return cs
+
+ def _checkargnotcontained(self, arg):
+ if arg in self.params or arg in self.funcargs:
+ raise ValueError("duplicate %r" %(arg,))
+
+ def getparam(self, name):
+ try:
+ return self.params[name]
+ except KeyError:
+ if self._globalparam is NOTSET:
+ raise ValueError(name)
+ return self._globalparam
+
+ @property
+ def id(self):
+ return "-".join(map(str, filter(None, self._idlist)))
+
+ def setmulti(self, valtypes, argnames, valset, id, keywords, scopenum,
+ param_index):
+ for arg,val in zip(argnames, valset):
+ self._checkargnotcontained(arg)
+ valtype_for_arg = valtypes[arg]
+ getattr(self, valtype_for_arg)[arg] = val
+ self.indices[arg] = param_index
+ self._arg2scopenum[arg] = scopenum
+ self._idlist.append(id)
+ self.keywords.update(keywords)
+
+ def setall(self, funcargs, id, param):
+ for x in funcargs:
+ self._checkargnotcontained(x)
+ self.funcargs.update(funcargs)
+ if id is not NOTSET:
+ self._idlist.append(id)
+ if param is not NOTSET:
+ assert self._globalparam is NOTSET
+ self._globalparam = param
+ for arg in funcargs:
+ self._arg2scopenum[arg] = fixtures.scopenum_function
+
+
+class Metafunc(fixtures.FuncargnamesCompatAttr):
+ """
+ Metafunc objects are passed to the ``pytest_generate_tests`` hook.
+ They help to inspect a test function and to generate tests according to
+ test configuration or values specified in the class or module where a
+ test function is defined.
+ """
+ def __init__(self, function, fixtureinfo, config, cls=None, module=None):
+ #: access to the :class:`_pytest.config.Config` object for the test session
+ self.config = config
+
+ #: the module object where the test function is defined in.
+ self.module = module
+
+ #: underlying python test function
+ self.function = function
+
+ #: set of fixture names required by the test function
+ self.fixturenames = fixtureinfo.names_closure
+
+ #: class object where the test function is defined in or ``None``.
+ self.cls = cls
+
+ self._calls = []
+ self._ids = py.builtin.set()
+ self._arg2fixturedefs = fixtureinfo.name2fixturedefs
+
+ def parametrize(self, argnames, argvalues, indirect=False, ids=None,
+ scope=None):
+ """ Add new invocations to the underlying test function using the list
+ of argvalues for the given argnames. Parametrization is performed
+ during the collection phase. If you need to set up expensive resources,
+ see about setting ``indirect`` to do it at test setup time rather than
+ at collection time.
+
+ :arg argnames: a comma-separated string denoting one or more argument
+ names, or a list/tuple of argument strings.
+
+ :arg argvalues: The list of argvalues determines how often a
+ test is invoked with different argument values. If only one
+ argname was specified argvalues is a list of values. If N
+ argnames were specified, argvalues must be a list of N-tuples,
+ where each tuple-element specifies a value for its respective
+ argname.
+
+ :arg indirect: A list of argument names (a subset of argnames), or a
+ boolean. If True the list contains all names from argnames. Each
+ argvalue corresponding to an argname in this list will
+ be passed as request.param to its respective argname fixture
+ function so that it can perform more expensive setups during the
+ setup phase of a test rather than at collection time.
+
+ :arg ids: list of string ids, or a callable.
+ If strings, each corresponds to an entry in argvalues so that it becomes
+ part of the test id. If None is given as the id of a specific test, the
+ automatically generated id for that argument will be used.
+ If callable, it should take one argument (a single argvalue) and return
+ a string or return None. If None, the automatically generated id for that
+ argument will be used.
+ If no ids are provided they will be generated automatically from
+ the argvalues.
+
+ :arg scope: if specified it denotes the scope of the parameters.
+ The scope is used for grouping tests by parameter instances.
+ It will also override any fixture-function defined scope, allowing a
+ dynamic scope to be set using test context or configuration.
+ """
+ from _pytest.fixtures import scope2index
+ from _pytest.mark import extract_argvalue
+ from py.io import saferepr
+
+ unwrapped_argvalues = []
+ newkeywords = []
+ for maybe_marked_args in argvalues:
+ argval, newmarks = extract_argvalue(maybe_marked_args)
+ unwrapped_argvalues.append(argval)
+ newkeywords.append(newmarks)
+ argvalues = unwrapped_argvalues
+
+ if not isinstance(argnames, (tuple, list)):
+ argnames = [x.strip() for x in argnames.split(",") if x.strip()]
+ if len(argnames) == 1:
+ argvalues = [(val,) for val in argvalues]
+ if not argvalues:
+ argvalues = [(NOTSET,) * len(argnames)]
+ # we passed an empty list to parametrize, skip that test
+ #
+ fs, lineno = getfslineno(self.function)
+ newmark = pytest.mark.skip(
+ reason="got empty parameter set %r, function %s at %s:%d" % (
+ argnames, self.function.__name__, fs, lineno))
+ newkeywords = [{newmark.markname: newmark}]
+
+ if scope is None:
+ scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect)
+
+ scopenum = scope2index(
+ scope, descr='call to {0}'.format(self.parametrize))
+ valtypes = {}
+ for arg in argnames:
+ if arg not in self.fixturenames:
+ if isinstance(indirect, (tuple, list)):
+ name = 'fixture' if arg in indirect else 'argument'
+ else:
+ name = 'fixture' if indirect else 'argument'
+ raise ValueError(
+ "%r uses no %s %r" % (
+ self.function, name, arg))
+
+ if indirect is True:
+ valtypes = dict.fromkeys(argnames, "params")
+ elif indirect is False:
+ valtypes = dict.fromkeys(argnames, "funcargs")
+ elif isinstance(indirect, (tuple, list)):
+ valtypes = dict.fromkeys(argnames, "funcargs")
+ for arg in indirect:
+ if arg not in argnames:
+ raise ValueError("indirect given to %r: fixture %r doesn't exist" % (
+ self.function, arg))
+ valtypes[arg] = "params"
+ idfn = None
+ if callable(ids):
+ idfn = ids
+ ids = None
+ if ids:
+ if len(ids) != len(argvalues):
+ raise ValueError('%d tests specified with %d ids' %(
+ len(argvalues), len(ids)))
+ for id_value in ids:
+ if id_value is not None and not isinstance(id_value, py.builtin._basestring):
+ msg = 'ids must be list of strings, found: %s (type: %s)'
+ raise ValueError(msg % (saferepr(id_value), type(id_value).__name__))
+ ids = idmaker(argnames, argvalues, idfn, ids, self.config)
+ newcalls = []
+ for callspec in self._calls or [CallSpec2(self)]:
+ elements = zip(ids, argvalues, newkeywords, count())
+ for a_id, valset, keywords, param_index in elements:
+ assert len(valset) == len(argnames)
+ newcallspec = callspec.copy(self)
+ newcallspec.setmulti(valtypes, argnames, valset, a_id,
+ keywords, scopenum, param_index)
+ newcalls.append(newcallspec)
+ self._calls = newcalls
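+
+ # Minimal sketch of driving parametrize() from a pytest_generate_tests
+ # hook in a conftest.py; the argument name and values are illustrative:
+ #
+ #     def pytest_generate_tests(metafunc):
+ #         if "db_backend" in metafunc.fixturenames:
+ #             metafunc.parametrize("db_backend", ["sqlite", "postgres"],
+ #                                  indirect=True)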
+
+ def addcall(self, funcargs=None, id=NOTSET, param=NOTSET):
+ """ (deprecated, use parametrize) Add a new call to the underlying
+ test function during the collection phase of a test run. Note that
+ request.addcall() is called during the test collection phase, prior to
+ and independently of actual test execution. You should only use addcall()
+ if you need to specify multiple arguments of a test function.
+
+ :arg funcargs: argument keyword dictionary used when invoking
+ the test function.
+
+ :arg id: used for reporting and identification purposes. If you
+ don't supply an `id` an automatic unique id will be generated.
+
+ :arg param: a parameter which will be exposed to a later fixture function
+ invocation through the ``request.param`` attribute.
+ """
+ assert funcargs is None or isinstance(funcargs, dict)
+ if funcargs is not None:
+ for name in funcargs:
+ if name not in self.fixturenames:
+ pytest.fail("funcarg %r not used in this function." % name)
+ else:
+ funcargs = {}
+ if id is None:
+ raise ValueError("id=None not allowed")
+ if id is NOTSET:
+ id = len(self._calls)
+ id = str(id)
+ if id in self._ids:
+ raise ValueError("duplicate id %r" % id)
+ self._ids.add(id)
+
+ cs = CallSpec2(self)
+ cs.setall(funcargs, id, param)
+ self._calls.append(cs)
+
+
+def _find_parametrized_scope(argnames, arg2fixturedefs, indirect):
+ """Find the most appropriate scope for a parametrized call based on its arguments.
+
+ When there's at least one direct argument, always use "function" scope.
+
+ When a test function is parametrized and all its arguments are indirect
+ (e.g. fixtures), return the most narrow scope based on the fixtures used.
+
+ Related to issue #1832, based on code posted by @Kingdread.
+ """
+ from _pytest.fixtures import scopes
+ indirect_as_list = isinstance(indirect, (list, tuple))
+ all_arguments_are_fixtures = indirect is True or \
+ indirect_as_list and len(indirect) == len(argnames)
+ if all_arguments_are_fixtures:
+ fixturedefs = arg2fixturedefs or {}
+ used_scopes = [fixturedef[0].scope for name, fixturedef in fixturedefs.items()]
+ if used_scopes:
+ # Takes the most narrow scope from used fixtures
+ for scope in reversed(scopes):
+ if scope in used_scopes:
+ return scope
+
+ return 'function'
+
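+ # Worked example (illustrative, not part of upstream pytest): if a test
+ # is parametrized with indirect=True over two fixtures whose scopes are
+ # 'module' and 'function', the narrowest scope in use is 'function', so
+ # the parametrization gets 'function' scope; with only 'session' and
+ # 'module' fixtures it would get 'module' scope.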
+
+def _idval(val, argname, idx, idfn, config=None):
+ if idfn:
+ try:
+ s = idfn(val)
+ if s:
+ return _escape_strings(s)
+ except Exception:
+ pass
+
+ if config:
+ hook_id = config.hook.pytest_make_parametrize_id(config=config, val=val)
+ if hook_id:
+ return hook_id
+
+ if isinstance(val, STRING_TYPES):
+ return _escape_strings(val)
+ elif isinstance(val, (float, int, bool, NoneType)):
+ return str(val)
+ elif isinstance(val, REGEX_TYPE):
+ return _escape_strings(val.pattern)
+ elif enum is not None and isinstance(val, enum.Enum):
+ return str(val)
+ elif isclass(val) and hasattr(val, '__name__'):
+ return val.__name__
+ return str(argname)+str(idx)
+
+def _idvalset(idx, valset, argnames, idfn, ids, config=None):
+ if ids is None or (idx >= len(ids) or ids[idx] is None):
+ this_id = [_idval(val, argname, idx, idfn, config)
+ for val, argname in zip(valset, argnames)]
+ return "-".join(this_id)
+ else:
+ return _escape_strings(ids[idx])
+
+def idmaker(argnames, argvalues, idfn=None, ids=None, config=None):
+ ids = [_idvalset(valindex, valset, argnames, idfn, ids, config)
+ for valindex, valset in enumerate(argvalues)]
+ if len(set(ids)) != len(ids):
+ # The ids are not unique
+ duplicates = [testid for testid in ids if ids.count(testid) > 1]
+ counters = collections.defaultdict(lambda: 0)
+ for index, testid in enumerate(ids):
+ if testid in duplicates:
+ ids[index] = testid + str(counters[testid])
+ counters[testid] += 1
+ return ids
+
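+ # Worked example (illustrative, not part of upstream pytest): for
+ # argnames ('x', 'y') and argvalues [(1, 'a'), (2, 'b')], idmaker yields
+ # ['1-a', '2-b'].  When two value sets would produce the same id, an
+ # index suffix is appended, e.g. [(1, 'a'), (1, 'a')] becomes
+ # ['1-a0', '1-a1'].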
+
+def show_fixtures_per_test(config):
+ from _pytest.main import wrap_session
+ return wrap_session(config, _show_fixtures_per_test)
+
+
+def _show_fixtures_per_test(config, session):
+ import _pytest.config
+ session.perform_collect()
+ curdir = py.path.local()
+ tw = _pytest.config.create_terminal_writer(config)
+ verbose = config.getvalue("verbose")
+
+ def get_best_rel(func):
+ loc = getlocation(func, curdir)
+ return curdir.bestrelpath(loc)
+
+ def write_fixture(fixture_def):
+ argname = fixture_def.argname
+
+ if verbose <= 0 and argname.startswith("_"):
+ return
+ if verbose > 0:
+ bestrel = get_best_rel(fixture_def.func)
+ funcargspec = "{0} -- {1}".format(argname, bestrel)
+ else:
+ funcargspec = argname
+ tw.line(funcargspec, green=True)
+
+ INDENT = ' {0}'
+ fixture_doc = fixture_def.func.__doc__
+
+ if fixture_doc:
+ for line in fixture_doc.strip().split('\n'):
+ tw.line(INDENT.format(line.strip()))
+ else:
+ tw.line(INDENT.format('no docstring available'), red=True)
+
+ def write_item(item):
+ name2fixturedefs = item._fixtureinfo.name2fixturedefs
+
+ if not name2fixturedefs:
+ # The given test item does not use any fixtures
+ return
+ bestrel = get_best_rel(item.function)
+
+ tw.line()
+ tw.sep('-', 'fixtures used by {0}'.format(item.name))
+ tw.sep('-', '({0})'.format(bestrel))
+ for argname, fixture_defs in sorted(name2fixturedefs.items()):
+ assert fixture_defs is not None
+ if not fixture_defs:
+ continue
+ # The last fixture def item in the list is expected
+ # to be the one used by the test item
+ write_fixture(fixture_defs[-1])
+
+ for item in session.items:
+ write_item(item)
+
+
+def showfixtures(config):
+ from _pytest.main import wrap_session
+ return wrap_session(config, _showfixtures_main)
+
+def _showfixtures_main(config, session):
+ import _pytest.config
+ session.perform_collect()
+ curdir = py.path.local()
+ tw = _pytest.config.create_terminal_writer(config)
+ verbose = config.getvalue("verbose")
+
+ fm = session._fixturemanager
+
+ available = []
+ seen = set()
+
+ for argname, fixturedefs in fm._arg2fixturedefs.items():
+ assert fixturedefs is not None
+ if not fixturedefs:
+ continue
+ for fixturedef in fixturedefs:
+ loc = getlocation(fixturedef.func, curdir)
+ if (fixturedef.argname, loc) in seen:
+ continue
+ seen.add((fixturedef.argname, loc))
+ available.append((len(fixturedef.baseid),
+ fixturedef.func.__module__,
+ curdir.bestrelpath(loc),
+ fixturedef.argname, fixturedef))
+
+ available.sort()
+ currentmodule = None
+ for baseid, module, bestrel, argname, fixturedef in available:
+ if currentmodule != module:
+ if not module.startswith("_pytest."):
+ tw.line()
+ tw.sep("-", "fixtures defined from %s" %(module,))
+ currentmodule = module
+ if verbose <= 0 and argname[0] == "_":
+ continue
+ if verbose > 0:
+ funcargspec = "%s -- %s" %(argname, bestrel,)
+ else:
+ funcargspec = argname
+ tw.line(funcargspec, green=True)
+ loc = getlocation(fixturedef.func, curdir)
+ doc = fixturedef.func.__doc__ or ""
+ if doc:
+ for line in doc.strip().split("\n"):
+ tw.line(" " + line.strip())
+ else:
+ tw.line(" %s: no docstring available" %(loc,),
+ red=True)
+
+
+# builtin pytest.raises helper
+
+def raises(expected_exception, *args, **kwargs):
+ """
+ Assert that a code block/function call raises ``expected_exception``
+ and raise a failure exception otherwise.
+
+ This helper produces an ``ExceptionInfo()`` object (see below).
+
+ If using Python 2.5 or above, you may use this function as a
+ context manager::
+
+ >>> with raises(ZeroDivisionError):
+ ... 1/0
+
+ .. versionchanged:: 2.10
+
+ In the context manager form you may use the keyword argument
+ ``message`` to specify a custom failure message::
+
+ >>> with raises(ZeroDivisionError, message="Expecting ZeroDivisionError"):
+ ... pass
+ Traceback (most recent call last):
+ ...
+ Failed: Expecting ZeroDivisionError
+
+
+ .. note::
+
+ When using ``pytest.raises`` as a context manager, it's worthwhile to
+ note that normal context manager rules apply and that the statement
+ that raises the exception *must* be the final line in the scope of the
+ context manager. Lines of code after that, within the scope of the
+ context manager, will not be executed. For example::
+
+ >>> value = 15
+ >>> with raises(ValueError) as exc_info:
+ ... if value > 10:
+ ... raise ValueError("value must be <= 10")
+ ... assert str(exc_info.value) == "value must be <= 10" # this will not execute
+
+ Instead, the following approach must be taken (note the difference in
+ scope)::
+
+ >>> with raises(ValueError) as exc_info:
+ ... if value > 10:
+ ... raise ValueError("value must be <= 10")
+ ...
+ >>> assert str(exc_info.value) == "value must be <= 10"
+
+
+ Or you can specify a callable by passing a to-be-called lambda::
+
+ >>> raises(ZeroDivisionError, lambda: 1/0)
+ <ExceptionInfo ...>
+
+ or you can specify an arbitrary callable with arguments::
+
+ >>> def f(x): return 1/x
+ ...
+ >>> raises(ZeroDivisionError, f, 0)
+ <ExceptionInfo ...>
+ >>> raises(ZeroDivisionError, f, x=0)
+ <ExceptionInfo ...>
+
+ A third possibility is to use a string to be executed::
+
+ >>> raises(ZeroDivisionError, "f(0)")
+ <ExceptionInfo ...>
+
+ .. autoclass:: _pytest._code.ExceptionInfo
+ :members:
+
+ .. note::
+ Similar to caught exception objects in Python, explicitly clearing
+ local references to returned ``ExceptionInfo`` objects can
+ help the Python interpreter speed up its garbage collection.
+
+ Clearing those references breaks a reference cycle
+ (``ExceptionInfo`` --> caught exception --> frame stack raising
+ the exception --> current frame stack --> local variables -->
+ ``ExceptionInfo``) which makes Python keep all objects referenced
+ from that cycle (including all local variables in the current
+ frame) alive until the next cyclic garbage collection run. See the
+ official Python ``try`` statement documentation for more detailed
+ information.
+
+ """
+ __tracebackhide__ = True
+ if expected_exception is AssertionError:
+ # we want to catch an AssertionError
+ # replace our subclass with the builtin one
+ # see https://github.com/pytest-dev/pytest/issues/176
+ from _pytest.assertion.util import BuiltinAssertionError \
+ as expected_exception
+ msg = ("exceptions must be old-style classes or"
+ " derived from BaseException, not %s")
+ if isinstance(expected_exception, tuple):
+ for exc in expected_exception:
+ if not isclass(exc):
+ raise TypeError(msg % type(exc))
+ elif not isclass(expected_exception):
+ raise TypeError(msg % type(expected_exception))
+
+ message = "DID NOT RAISE {0}".format(expected_exception)
+
+ if not args:
+ if "message" in kwargs:
+ message = kwargs.pop("message")
+ return RaisesContext(expected_exception, message)
+ elif isinstance(args[0], str):
+ code, = args
+ assert isinstance(code, str)
+ frame = sys._getframe(1)
+ loc = frame.f_locals.copy()
+ loc.update(kwargs)
+ #print "raises frame scope: %r" % frame.f_locals
+ try:
+ code = _pytest._code.Source(code).compile()
+ py.builtin.exec_(code, frame.f_globals, loc)
+ # XXX didn't f_globals == f_locals mean something special?
+ # this is destroyed here ...
+ except expected_exception:
+ return _pytest._code.ExceptionInfo()
+ else:
+ func = args[0]
+ try:
+ func(*args[1:], **kwargs)
+ except expected_exception:
+ return _pytest._code.ExceptionInfo()
+ pytest.fail(message)
+
+class RaisesContext(object):
+ def __init__(self, expected_exception, message):
+ self.expected_exception = expected_exception
+ self.message = message
+ self.excinfo = None
+
+ def __enter__(self):
+ self.excinfo = object.__new__(_pytest._code.ExceptionInfo)
+ return self.excinfo
+
+ def __exit__(self, *tp):
+ __tracebackhide__ = True
+ if tp[0] is None:
+ pytest.fail(self.message)
+ if sys.version_info < (2, 7):
+ # py26: on __exit__() exc_value often does not contain the
+ # exception value.
+ # http://bugs.python.org/issue7853
+ if not isinstance(tp[1], BaseException):
+ exc_type, value, traceback = tp
+ tp = exc_type, exc_type(value), traceback
+ self.excinfo.__init__(tp)
+ suppress_exception = issubclass(self.excinfo.type, self.expected_exception)
+ if sys.version_info[0] == 2 and suppress_exception:
+ sys.exc_clear()
+ return suppress_exception
+
+
+# builtin pytest.approx helper
+
+class approx(object):
+ """
+ Assert that two numbers (or two sets of numbers) are equal to each other
+ within some tolerance.
+
+ Due to the `intricacies of floating-point arithmetic`__, numbers that we
+ would intuitively expect to be equal are not always so::
+
+ >>> 0.1 + 0.2 == 0.3
+ False
+
+ __ https://docs.python.org/3/tutorial/floatingpoint.html
+
+ This problem is commonly encountered when writing tests, e.g. when making
+ sure that floating-point values are what you expect them to be. One way to
+ deal with this problem is to assert that two floating-point numbers are
+ equal to within some appropriate tolerance::
+
+ >>> abs((0.1 + 0.2) - 0.3) < 1e-6
+ True
+
+ However, comparisons like this are tedious to write and difficult to
+ understand. Furthermore, absolute comparisons like the one above are
+ usually discouraged because there's no tolerance that works well for all
+ situations. ``1e-6`` is good for numbers around ``1``, but too small for
+ very big numbers and too big for very small ones. It's better to express
+ the tolerance as a fraction of the expected value, but relative comparisons
+ like that are even more difficult to write correctly and concisely.
+
+ The ``approx`` class performs floating-point comparisons using a syntax
+ that's as intuitive as possible::
+
+ >>> from pytest import approx
+ >>> 0.1 + 0.2 == approx(0.3)
+ True
+
+ The same syntax also works on sequences of numbers::
+
+ >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6))
+ True
+
+ By default, ``approx`` considers numbers within a relative tolerance of
+ ``1e-6`` (i.e. one part in a million) of its expected value to be equal.
+ This treatment would lead to surprising results if the expected value was
+ ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``.
+ To handle this case less surprisingly, ``approx`` also considers numbers
+ within an absolute tolerance of ``1e-12`` of its expected value to be
+ equal. Infinite numbers are another special case. They are only
+ considered equal to themselves, regardless of the relative tolerance. Both
+ the relative and absolute tolerances can be changed by passing arguments to
+ the ``approx`` constructor::
+
+ >>> 1.0001 == approx(1)
+ False
+ >>> 1.0001 == approx(1, rel=1e-3)
+ True
+ >>> 1.0001 == approx(1, abs=1e-3)
+ True
+
+ If you specify ``abs`` but not ``rel``, the comparison will not consider
+ the relative tolerance at all. In other words, two numbers that are within
+ the default relative tolerance of ``1e-6`` will still be considered unequal
+ if they exceed the specified absolute tolerance. If you specify both
+ ``abs`` and ``rel``, the numbers will be considered equal if either
+ tolerance is met::
+
+ >>> 1 + 1e-8 == approx(1)
+ True
+ >>> 1 + 1e-8 == approx(1, abs=1e-12)
+ False
+ >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12)
+ True
+
+ If you're thinking about using ``approx``, then you might want to know how
+ it compares to other good ways of comparing floating-point numbers. All of
+ these algorithms are based on relative and absolute tolerances and should
+ agree for the most part, but they do have meaningful differences:
+
+ - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative
+ tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute
+ tolerance is met. Because the relative tolerance is calculated w.r.t.
+ both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor
+ ``b`` is a "reference value"). You have to specify an absolute tolerance
+ if you want to compare to ``0.0`` because there is no tolerance by
+ default. Only available in python>=3.5. `More information...`__
+
+ __ https://docs.python.org/3/library/math.html#math.isclose
+
+ - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference
+ between ``a`` and ``b`` is less than the sum of the relative tolerance
+ w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance
+ is only calculated w.r.t. ``b``, this test is asymmetric and you can
+ think of ``b`` as the reference value. Support for comparing sequences
+ is provided by ``numpy.allclose``. `More information...`__
+
+ __ http://docs.scipy.org/doc/numpy-1.10.0/reference/generated/numpy.isclose.html
+
+ - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b``
+ are within an absolute tolerance of ``1e-7``. No relative tolerance is
+ considered and the absolute tolerance cannot be changed, so this function
+ is not appropriate for very large or very small numbers. Also, it's only
+ available in subclasses of ``unittest.TestCase`` and it's ugly because it
+ doesn't follow PEP8. `More information...`__
+
+ __ https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertAlmostEqual
+
+ - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative
+ tolerance is met w.r.t. ``b`` or if the absolute tolerance is met.
+ Because the relative tolerance is only calculated w.r.t. ``b``, this test
+ is asymmetric and you can think of ``b`` as the reference value. In the
+ special case that you explicitly specify an absolute tolerance but not a
+ relative tolerance, only the absolute tolerance is considered.
+ """
+
+ def __init__(self, expected, rel=None, abs=None):
+ self.expected = expected
+ self.abs = abs
+ self.rel = rel
+
+ def __repr__(self):
+ return ', '.join(repr(x) for x in self.expected)
+
+ def __eq__(self, actual):
+ from collections import Iterable
+ if not isinstance(actual, Iterable):
+ actual = [actual]
+ if len(actual) != len(self.expected):
+ return False
+ return all(a == x for a, x in zip(actual, self.expected))
+
+ __hash__ = None
+
+ def __ne__(self, actual):
+ return not (actual == self)
+
+ @property
+ def expected(self):
+ # Regardless of whether the user-specified expected value is a number
+ # or a sequence of numbers, return a list of ApproxNonIterable objects
+ # that can be compared against.
+ from collections import Iterable
+ approx_non_iter = lambda x: ApproxNonIterable(x, self.rel, self.abs)
+ if isinstance(self._expected, Iterable):
+ return [approx_non_iter(x) for x in self._expected]
+ else:
+ return [approx_non_iter(self._expected)]
+
+ @expected.setter
+ def expected(self, expected):
+ self._expected = expected
+
+
+class ApproxNonIterable(object):
+ """
+ Perform approximate comparisons for single numbers only.
+
+ In other words, the ``expected`` attribute for objects of this class must
+ be some sort of number. This is in contrast to the ``approx`` class, where
+ the ``expected`` attribute can either be a number or a sequence of numbers.
+ This class is responsible for making comparisons, while ``approx`` is
+ responsible for abstracting the difference between numbers and sequences of
+ numbers. Although this class can stand on its own, it's only meant to be
+ used within ``approx``.
+ """
+
+ def __init__(self, expected, rel=None, abs=None):
+ self.expected = expected
+ self.abs = abs
+ self.rel = rel
+
+ def __repr__(self):
+ if isinstance(self.expected, complex):
+ return str(self.expected)
+
+ # Infinities aren't compared using tolerances, so don't show a
+ # tolerance.
+ if math.isinf(self.expected):
+ return str(self.expected)
+
+ # If a sensible tolerance can't be calculated, self.tolerance will
+ # raise a ValueError. In this case, display '???'.
+ try:
+ vetted_tolerance = '{:.1e}'.format(self.tolerance)
+ except ValueError:
+ vetted_tolerance = '???'
+
+ if sys.version_info[0] == 2:
+ return '{0} +- {1}'.format(self.expected, vetted_tolerance)
+ else:
+ return u'{0} \u00b1 {1}'.format(self.expected, vetted_tolerance)
+
+ def __eq__(self, actual):
+ # Short-circuit exact equality.
+ if actual == self.expected:
+ return True
+
+ # Infinity shouldn't be approximately equal to anything but itself, but
+ # if there's a relative tolerance, it will be infinite and infinity
+ # will seem approximately equal to everything. The equal-to-itself
+ # case would have been short circuited above, so here we can just
+ # return false if the expected value is infinite. The abs() call is
+ # for compatibility with complex numbers.
+ if math.isinf(abs(self.expected)):
+ return False
+
+ # Return true if the two numbers are within the tolerance.
+ return abs(self.expected - actual) <= self.tolerance
+
+ __hash__ = None
+
+ def __ne__(self, actual):
+ return not (actual == self)
+
+ @property
+ def tolerance(self):
+ set_default = lambda x, default: x if x is not None else default
+
+ # Figure out what the absolute tolerance should be. ``self.abs`` is
+ # either None or a value specified by the user.
+ absolute_tolerance = set_default(self.abs, 1e-12)
+
+ if absolute_tolerance < 0:
+ raise ValueError("absolute tolerance can't be negative: {0}".format(absolute_tolerance))
+ if math.isnan(absolute_tolerance):
+ raise ValueError("absolute tolerance can't be NaN.")
+
+ # If the user specified an absolute tolerance but not a relative one,
+ # just return the absolute tolerance.
+ if self.rel is None:
+ if self.abs is not None:
+ return absolute_tolerance
+
+ # Figure out what the relative tolerance should be. ``self.rel`` is
+ # either None or a value specified by the user. This is done after
+ # we've made sure the user didn't ask for an absolute tolerance only,
+ # because we don't want to raise errors about the relative tolerance if
+ # we aren't even going to use it.
+ relative_tolerance = set_default(self.rel, 1e-6) * abs(self.expected)
+
+ if relative_tolerance < 0:
+ raise ValueError("relative tolerance can't be negative: {0}".format(absolute_tolerance))
+ if math.isnan(relative_tolerance):
+ raise ValueError("relative tolerance can't be NaN.")
+
+ # Return the larger of the relative and absolute tolerances.
+ return max(relative_tolerance, absolute_tolerance)
+
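+ # Worked example (illustrative, not part of upstream pytest): for
+ # ``approx(100, rel=1e-3)`` the relative tolerance is 1e-3 * 100 = 0.1
+ # and the default absolute tolerance is 1e-12, so the effective
+ # tolerance is max(0.1, 1e-12) = 0.1; hence
+ # ``100.09 == approx(100, rel=1e-3)`` is True, while
+ # ``100.2 == approx(100, rel=1e-3)`` is False.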
+
+#
+# the basic pytest Function item
+#
+
+class Function(FunctionMixin, pytest.Item, fixtures.FuncargnamesCompatAttr):
+ """ a Function Item is responsible for setting up and executing a
+ Python test function.
+ """
+ _genid = None
+ def __init__(self, name, parent, args=None, config=None,
+ callspec=None, callobj=NOTSET, keywords=None, session=None,
+ fixtureinfo=None, originalname=None):
+ super(Function, self).__init__(name, parent, config=config,
+ session=session)
+ self._args = args
+ if callobj is not NOTSET:
+ self.obj = callobj
+
+ self.keywords.update(self.obj.__dict__)
+ if callspec:
+ self.callspec = callspec
+ self.keywords.update(callspec.keywords)
+ if keywords:
+ self.keywords.update(keywords)
+
+ if fixtureinfo is None:
+ fixtureinfo = self.session._fixturemanager.getfixtureinfo(
+ self.parent, self.obj, self.cls,
+ funcargs=not self._isyieldedfunction())
+ self._fixtureinfo = fixtureinfo
+ self.fixturenames = fixtureinfo.names_closure
+ self._initrequest()
+
+ #: original function name, without any decorations (for example
+ #: parametrization adds a ``"[...]"`` suffix to function names).
+ #:
+ #: .. versionadded:: 3.0
+ self.originalname = originalname
+
+ def _initrequest(self):
+ self.funcargs = {}
+ if self._isyieldedfunction():
+ assert not hasattr(self, "callspec"), (
+ "yielded functions (deprecated) cannot have funcargs")
+ else:
+ if hasattr(self, "callspec"):
+ callspec = self.callspec
+ assert not callspec.funcargs
+ self._genid = callspec.id
+ if hasattr(callspec, "param"):
+ self.param = callspec.param
+ self._request = fixtures.FixtureRequest(self)
+
+ @property
+ def function(self):
+ "underlying python 'function' object"
+ return getattr(self.obj, 'im_func', self.obj)
+
+ def _getobj(self):
+ name = self.name
+ i = name.find("[") # parametrization
+ if i != -1:
+ name = name[:i]
+ return getattr(self.parent.obj, name)
+
+ @property
+ def _pyfuncitem(self):
+ "(compatonly) for code expecting pytest-2.2 style request objects"
+ return self
+
+ def _isyieldedfunction(self):
+ return getattr(self, "_args", None) is not None
+
+ def runtest(self):
+ """ execute the underlying test function. """
+ self.ihook.pytest_pyfunc_call(pyfuncitem=self)
+
+ def setup(self):
+ super(Function, self).setup()
+ fixtures.fillfixtures(self)
diff --git a/lib/spack/external/_pytest/recwarn.py b/lib/spack/external/_pytest/recwarn.py
new file mode 100644
index 0000000000..87823bfbc6
--- /dev/null
+++ b/lib/spack/external/_pytest/recwarn.py
@@ -0,0 +1,226 @@
+""" recording warnings during test function execution. """
+
+import inspect
+
+import _pytest._code
+import py
+import sys
+import warnings
+import pytest
+
+
+@pytest.yield_fixture
+def recwarn(request):
+ """Return a WarningsRecorder instance that provides these methods:
+
+ * ``pop(cls=Warning)``: pop and return the first recorded warning matching the class.
+ * ``clear()``: clear list of warnings
+
+ See http://docs.python.org/library/warnings.html for information
+ on warning categories.
+ """
+ wrec = WarningsRecorder()
+ with wrec:
+ warnings.simplefilter('default')
+ yield wrec
+
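+ # Illustrative usage sketch (not part of upstream pytest): a test can
+ # request the ``recwarn`` fixture and inspect what was warned
+ # (assumes ``import warnings`` in the test module).
+ #
+ #     def test_deprecation_message(recwarn):
+ #         warnings.warn("old api", DeprecationWarning)
+ #         w = recwarn.pop(DeprecationWarning)
+ #         assert "old api" in str(w.message)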
+
+def pytest_namespace():
+ return {'deprecated_call': deprecated_call,
+ 'warns': warns}
+
+
+def deprecated_call(func=None, *args, **kwargs):
+ """ assert that calling ``func(*args, **kwargs)`` triggers a
+ ``DeprecationWarning`` or ``PendingDeprecationWarning``.
+
+ This function can be used as a context manager::
+
+ >>> import warnings
+ >>> def api_call_v2():
+ ... warnings.warn('use v3 of this api', DeprecationWarning)
+ ... return 200
+
+ >>> with deprecated_call():
+ ... assert api_call_v2() == 200
+
+ Note: we cannot use WarningsRecorder here because it is still subject
+ to the mechanism that prevents warnings of the same type from being
+ triggered twice for the same module. See #1190.
+ """
+ if not func:
+ return WarningsChecker(expected_warning=DeprecationWarning)
+
+ categories = []
+
+ def warn_explicit(message, category, *args, **kwargs):
+ categories.append(category)
+ old_warn_explicit(message, category, *args, **kwargs)
+
+ def warn(message, category=None, *args, **kwargs):
+ if isinstance(message, Warning):
+ categories.append(message.__class__)
+ else:
+ categories.append(category)
+ old_warn(message, category, *args, **kwargs)
+
+ old_warn = warnings.warn
+ old_warn_explicit = warnings.warn_explicit
+ warnings.warn_explicit = warn_explicit
+ warnings.warn = warn
+ try:
+ ret = func(*args, **kwargs)
+ finally:
+ warnings.warn_explicit = old_warn_explicit
+ warnings.warn = old_warn
+ deprecation_categories = (DeprecationWarning, PendingDeprecationWarning)
+ if not any(issubclass(c, deprecation_categories) for c in categories):
+ __tracebackhide__ = True
+ raise AssertionError("%r did not produce DeprecationWarning" % (func,))
+ return ret
+
+
+def warns(expected_warning, *args, **kwargs):
+ """Assert that code raises a particular class of warning.
+
+ Specifically, the input @expected_warning can be a warning class or
+ tuple of warning classes, and the code must return that warning
+ (if a single class) or one of those warnings (if a tuple).
+
+ This helper produces a list of ``warnings.WarningMessage`` objects,
+ one for each warning raised.
+
+ This function can be used as a context manager, or any of the other ways
+ ``pytest.raises`` can be used::
+
+ >>> with warns(RuntimeWarning):
+ ... warnings.warn("my warning", RuntimeWarning)
+ """
+ wcheck = WarningsChecker(expected_warning)
+ if not args:
+ return wcheck
+ elif isinstance(args[0], str):
+ code, = args
+ assert isinstance(code, str)
+ frame = sys._getframe(1)
+ loc = frame.f_locals.copy()
+ loc.update(kwargs)
+
+ with wcheck:
+ code = _pytest._code.Source(code).compile()
+ py.builtin.exec_(code, frame.f_globals, loc)
+ else:
+ func = args[0]
+ with wcheck:
+ return func(*args[1:], **kwargs)
+
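+ # Illustrative usage sketch (not part of upstream pytest): the object
+ # returned by the context-manager form records the warnings that were
+ # emitted, so they can be inspected afterwards (assumes
+ # ``import warnings`` in the test module).
+ #
+ #     with warns(UserWarning) as record:
+ #         warnings.warn("careful", UserWarning)
+ #     assert len(record) == 1
+ #     assert str(record[0].message) == "careful"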
+
+class RecordedWarning(object):
+ def __init__(self, message, category, filename, lineno, file, line):
+ self.message = message
+ self.category = category
+ self.filename = filename
+ self.lineno = lineno
+ self.file = file
+ self.line = line
+
+
+class WarningsRecorder(object):
+ """A context manager to record raised warnings.
+
+ Adapted from `warnings.catch_warnings`.
+ """
+
+ def __init__(self, module=None):
+ self._module = sys.modules['warnings'] if module is None else module
+ self._entered = False
+ self._list = []
+
+ @property
+ def list(self):
+ """The list of recorded warnings."""
+ return self._list
+
+ def __getitem__(self, i):
+ """Get a recorded warning by index."""
+ return self._list[i]
+
+ def __iter__(self):
+ """Iterate through the recorded warnings."""
+ return iter(self._list)
+
+ def __len__(self):
+ """The number of recorded warnings."""
+ return len(self._list)
+
+ def pop(self, cls=Warning):
+ """Pop the first recorded warning, raise exception if not exists."""
+ for i, w in enumerate(self._list):
+ if issubclass(w.category, cls):
+ return self._list.pop(i)
+ __tracebackhide__ = True
+ raise AssertionError("%r not found in warning list" % cls)
+
+ def clear(self):
+ """Clear the list of recorded warnings."""
+ self._list[:] = []
+
+ def __enter__(self):
+ if self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot enter %r twice" % self)
+ self._entered = True
+ self._filters = self._module.filters
+ self._module.filters = self._filters[:]
+ self._showwarning = self._module.showwarning
+
+ def showwarning(message, category, filename, lineno,
+ file=None, line=None):
+ self._list.append(RecordedWarning(
+ message, category, filename, lineno, file, line))
+
+ # still perform old showwarning functionality
+ self._showwarning(
+ message, category, filename, lineno, file=file, line=line)
+
+ self._module.showwarning = showwarning
+
+ # allow the same warning to be raised more than once
+
+ self._module.simplefilter('always')
+ return self
+
+ def __exit__(self, *exc_info):
+ if not self._entered:
+ __tracebackhide__ = True
+ raise RuntimeError("Cannot exit %r without entering first" % self)
+ self._module.filters = self._filters
+ self._module.showwarning = self._showwarning
+
+
+class WarningsChecker(WarningsRecorder):
+ def __init__(self, expected_warning=None, module=None):
+ super(WarningsChecker, self).__init__(module=module)
+
+ msg = ("exceptions must be old-style classes or "
+ "derived from Warning, not %s")
+ if isinstance(expected_warning, tuple):
+ for exc in expected_warning:
+ if not inspect.isclass(exc):
+ raise TypeError(msg % type(exc))
+ elif inspect.isclass(expected_warning):
+ expected_warning = (expected_warning,)
+ elif expected_warning is not None:
+ raise TypeError(msg % type(expected_warning))
+
+ self.expected_warning = expected_warning
+
+ def __exit__(self, *exc_info):
+ super(WarningsChecker, self).__exit__(*exc_info)
+
+ # only check if we're not currently handling an exception
+ if all(a is None for a in exc_info):
+ if self.expected_warning is not None:
+ if not any(r.category in self.expected_warning for r in self):
+ __tracebackhide__ = True
+ pytest.fail("DID NOT WARN")
diff --git a/lib/spack/external/_pytest/resultlog.py b/lib/spack/external/_pytest/resultlog.py
new file mode 100644
index 0000000000..fc00259834
--- /dev/null
+++ b/lib/spack/external/_pytest/resultlog.py
@@ -0,0 +1,107 @@
+""" log machine-parseable test session result information in a plain
+text file.
+"""
+
+import py
+import os
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "resultlog plugin options")
+ group.addoption('--resultlog', '--result-log', action="store",
+ metavar="path", default=None,
+ help="DEPRECATED path for machine-readable result log.")
+
+def pytest_configure(config):
+ resultlog = config.option.resultlog
+ # prevent opening resultlog on slave nodes (xdist)
+ if resultlog and not hasattr(config, 'slaveinput'):
+ dirname = os.path.dirname(os.path.abspath(resultlog))
+ if not os.path.isdir(dirname):
+ os.makedirs(dirname)
+ logfile = open(resultlog, 'w', 1) # line buffered
+ config._resultlog = ResultLog(config, logfile)
+ config.pluginmanager.register(config._resultlog)
+
+ from _pytest.deprecated import RESULT_LOG
+ config.warn('C1', RESULT_LOG)
+
+def pytest_unconfigure(config):
+ resultlog = getattr(config, '_resultlog', None)
+ if resultlog:
+ resultlog.logfile.close()
+ del config._resultlog
+ config.pluginmanager.unregister(resultlog)
+
+def generic_path(item):
+ chain = item.listchain()
+ gpath = [chain[0].name]
+ fspath = chain[0].fspath
+ fspart = False
+ for node in chain[1:]:
+ newfspath = node.fspath
+ if newfspath == fspath:
+ if fspart:
+ gpath.append(':')
+ fspart = False
+ else:
+ gpath.append('.')
+ else:
+ gpath.append('/')
+ fspart = True
+ name = node.name
+ if name[0] in '([':
+ gpath.pop()
+ gpath.append(name)
+ fspath = newfspath
+ return ''.join(gpath)
+
+class ResultLog(object):
+ def __init__(self, config, logfile):
+ self.config = config
+ self.logfile = logfile # preferably line buffered
+
+ def write_log_entry(self, testpath, lettercode, longrepr):
+ py.builtin.print_("%s %s" % (lettercode, testpath), file=self.logfile)
+ for line in longrepr.splitlines():
+ py.builtin.print_(" %s" % line, file=self.logfile)
+
+ def log_outcome(self, report, lettercode, longrepr):
+ testpath = getattr(report, 'nodeid', None)
+ if testpath is None:
+ testpath = report.fspath
+ self.write_log_entry(testpath, lettercode, longrepr)
+
+ def pytest_runtest_logreport(self, report):
+ if report.when != "call" and report.passed:
+ return
+ res = self.config.hook.pytest_report_teststatus(report=report)
+ code = res[1]
+ if code == 'x':
+ longrepr = str(report.longrepr)
+ elif code == 'X':
+ longrepr = ''
+ elif report.passed:
+ longrepr = ""
+ elif report.failed:
+ longrepr = str(report.longrepr)
+ elif report.skipped:
+ longrepr = str(report.longrepr[2])
+ self.log_outcome(report, code, longrepr)
+
+ def pytest_collectreport(self, report):
+ if not report.passed:
+ if report.failed:
+ code = "F"
+ longrepr = str(report.longrepr)
+ else:
+ assert report.skipped
+ code = "S"
+ longrepr = "%s:%d: %s" % report.longrepr
+ self.log_outcome(report, code, longrepr)
+
+ def pytest_internalerror(self, excrepr):
+ reprcrash = getattr(excrepr, 'reprcrash', None)
+ path = getattr(reprcrash, "path", None)
+ if path is None:
+ path = "cwd:%s" % py.path.local()
+ self.write_log_entry(path, '!', str(excrepr))
diff --git a/lib/spack/external/_pytest/runner.py b/lib/spack/external/_pytest/runner.py
new file mode 100644
index 0000000000..eb29e7370c
--- /dev/null
+++ b/lib/spack/external/_pytest/runner.py
@@ -0,0 +1,578 @@
+""" basic collect and runtest protocol implementations """
+import bdb
+import sys
+from time import time
+
+import py
+import pytest
+from _pytest._code.code import TerminalRepr, ExceptionInfo
+
+
+def pytest_namespace():
+ return {
+ 'fail' : fail,
+ 'skip' : skip,
+ 'importorskip' : importorskip,
+ 'exit' : exit,
+ }
+
+#
+# pytest plugin hooks
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group.addoption('--durations',
+ action="store", type=int, default=None, metavar="N",
+ help="show N slowest setup/test durations (N=0 for all)."),
+
+def pytest_terminal_summary(terminalreporter):
+ durations = terminalreporter.config.option.durations
+ if durations is None:
+ return
+ tr = terminalreporter
+ dlist = []
+ for replist in tr.stats.values():
+ for rep in replist:
+ if hasattr(rep, 'duration'):
+ dlist.append(rep)
+ if not dlist:
+ return
+ dlist.sort(key=lambda x: x.duration)
+ dlist.reverse()
+ if not durations:
+ tr.write_sep("=", "slowest test durations")
+ else:
+ tr.write_sep("=", "slowest %s test durations" % durations)
+ dlist = dlist[:durations]
+
+ for rep in dlist:
+ nodeid = rep.nodeid.replace("::()::", "::")
+ tr.write_line("%02.2fs %-8s %s" %
+ (rep.duration, rep.when, nodeid))
+
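+ # Illustrative usage sketch (not part of upstream pytest): invoking
+ # ``pytest --durations=10`` prints the ten slowest setup/call/teardown
+ # phases at the end of the run; ``--durations=0`` lists them all.
+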
+def pytest_sessionstart(session):
+ session._setupstate = SetupState()
+def pytest_sessionfinish(session):
+ session._setupstate.teardown_all()
+
+class NodeInfo:
+ def __init__(self, location):
+ self.location = location
+
+def pytest_runtest_protocol(item, nextitem):
+ item.ihook.pytest_runtest_logstart(
+ nodeid=item.nodeid, location=item.location,
+ )
+ runtestprotocol(item, nextitem=nextitem)
+ return True
+
+def runtestprotocol(item, log=True, nextitem=None):
+ hasrequest = hasattr(item, "_request")
+ if hasrequest and not item._request:
+ item._initrequest()
+ rep = call_and_report(item, "setup", log)
+ reports = [rep]
+ if rep.passed:
+ if item.config.option.setupshow:
+ show_test_item(item)
+ if not item.config.option.setuponly:
+ reports.append(call_and_report(item, "call", log))
+ reports.append(call_and_report(item, "teardown", log,
+ nextitem=nextitem))
+ # after all teardown hooks have been called
+ # want funcargs and request info to go away
+ if hasrequest:
+ item._request = False
+ item.funcargs = None
+ return reports
+
+def show_test_item(item):
+ """Show test function, parameters and the fixtures of the test item."""
+ tw = item.config.get_terminal_writer()
+ tw.line()
+ tw.write(' ' * 8)
+ tw.write(item._nodeid)
+ used_fixtures = sorted(item._fixtureinfo.name2fixturedefs.keys())
+ if used_fixtures:
+ tw.write(' (fixtures used: {0})'.format(', '.join(used_fixtures)))
+
+def pytest_runtest_setup(item):
+ item.session._setupstate.prepare(item)
+
+def pytest_runtest_call(item):
+ try:
+ item.runtest()
+ except Exception:
+ # Store trace info to allow postmortem debugging
+ type, value, tb = sys.exc_info()
+ tb = tb.tb_next # Skip *this* frame
+ sys.last_type = type
+ sys.last_value = value
+ sys.last_traceback = tb
+ del tb # Get rid of it in this namespace
+ raise
+
+def pytest_runtest_teardown(item, nextitem):
+ item.session._setupstate.teardown_exact(item, nextitem)
+
+def pytest_report_teststatus(report):
+ if report.when in ("setup", "teardown"):
+ if report.failed:
+ # category, shortletter, verbose-word
+ return "error", "E", "ERROR"
+ elif report.skipped:
+ return "skipped", "s", "SKIPPED"
+ else:
+ return "", "", ""
+
+
+#
+# Implementation
+
+def call_and_report(item, when, log=True, **kwds):
+ call = call_runtest_hook(item, when, **kwds)
+ hook = item.ihook
+ report = hook.pytest_runtest_makereport(item=item, call=call)
+ if log:
+ hook.pytest_runtest_logreport(report=report)
+ if check_interactive_exception(call, report):
+ hook.pytest_exception_interact(node=item, call=call, report=report)
+ return report
+
+def check_interactive_exception(call, report):
+ return call.excinfo and not (
+ hasattr(report, "wasxfail") or
+ call.excinfo.errisinstance(skip.Exception) or
+ call.excinfo.errisinstance(bdb.BdbQuit))
+
+def call_runtest_hook(item, when, **kwds):
+ hookname = "pytest_runtest_" + when
+ ihook = getattr(item.ihook, hookname)
+ return CallInfo(lambda: ihook(item=item, **kwds), when=when)
+
+class CallInfo:
+ """ Result/Exception info a function invocation. """
+ #: None or ExceptionInfo object.
+ excinfo = None
+ def __init__(self, func, when):
+ #: context of invocation: one of "setup", "call",
+ #: "teardown", "memocollect"
+ self.when = when
+ self.start = time()
+ try:
+ self.result = func()
+ except KeyboardInterrupt:
+ self.stop = time()
+ raise
+ except:
+ self.excinfo = ExceptionInfo()
+ self.stop = time()
+
+ def __repr__(self):
+ if self.excinfo:
+ status = "exception: %s" % str(self.excinfo.value)
+ else:
+ status = "result: %r" % (self.result,)
+ return "<CallInfo when=%r %s>" % (self.when, status)
+
+def getslaveinfoline(node):
+ try:
+ return node._slaveinfocache
+ except AttributeError:
+ d = node.slaveinfo
+ ver = "%s.%s.%s" % d['version_info'][:3]
+ node._slaveinfocache = s = "[%s] %s -- Python %s %s" % (
+ d['id'], d['sysplatform'], ver, d['executable'])
+ return s
+
+class BaseReport(object):
+
+ def __init__(self, **kw):
+ self.__dict__.update(kw)
+
+ def toterminal(self, out):
+ if hasattr(self, 'node'):
+ out.line(getslaveinfoline(self.node))
+
+ longrepr = self.longrepr
+ if longrepr is None:
+ return
+
+ if hasattr(longrepr, 'toterminal'):
+ longrepr.toterminal(out)
+ else:
+ try:
+ out.line(longrepr)
+ except UnicodeEncodeError:
+ out.line("<unprintable longrepr>")
+
+ def get_sections(self, prefix):
+ for name, content in self.sections:
+ if name.startswith(prefix):
+ yield prefix, content
+
+ @property
+ def longreprtext(self):
+ """
+ Read-only property that returns the full string representation
+ of ``longrepr``.
+
+ .. versionadded:: 3.0
+ """
+ tw = py.io.TerminalWriter(stringio=True)
+ tw.hasmarkup = False
+ self.toterminal(tw)
+ exc = tw.stringio.getvalue()
+ return exc.strip()
+
+ @property
+ def capstdout(self):
+ """Return captured text from stdout, if capturing is enabled
+
+ .. versionadded:: 3.0
+ """
+ return ''.join(content for (prefix, content) in self.get_sections('Captured stdout'))
+
+ @property
+ def capstderr(self):
+ """Return captured text from stderr, if capturing is enabled
+
+ .. versionadded:: 3.0
+ """
+ return ''.join(content for (prefix, content) in self.get_sections('Captured stderr'))
+
+ passed = property(lambda x: x.outcome == "passed")
+ failed = property(lambda x: x.outcome == "failed")
+ skipped = property(lambda x: x.outcome == "skipped")
+
+ @property
+ def fspath(self):
+ return self.nodeid.split("::")[0]
+
+def pytest_runtest_makereport(item, call):
+ when = call.when
+ duration = call.stop-call.start
+ keywords = dict([(x,1) for x in item.keywords])
+ excinfo = call.excinfo
+ sections = []
+ if not call.excinfo:
+ outcome = "passed"
+ longrepr = None
+ else:
+ if not isinstance(excinfo, ExceptionInfo):
+ outcome = "failed"
+ longrepr = excinfo
+ elif excinfo.errisinstance(pytest.skip.Exception):
+ outcome = "skipped"
+ r = excinfo._getreprcrash()
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ if call.when == "call":
+ longrepr = item.repr_failure(excinfo)
+ else: # exception in setup or teardown
+ longrepr = item._repr_failure_py(excinfo,
+ style=item.config.option.tbstyle)
+ for rwhen, key, content in item._report_sections:
+ sections.append(("Captured %s %s" %(key, rwhen), content))
+ return TestReport(item.nodeid, item.location,
+ keywords, outcome, longrepr, when,
+ sections, duration)
+
+class TestReport(BaseReport):
+ """ Basic test report object (also used for setup and teardown calls if
+ they fail).
+ """
+ def __init__(self, nodeid, location, keywords, outcome,
+ longrepr, when, sections=(), duration=0, **extra):
+ #: normalized collection node id
+ self.nodeid = nodeid
+
+ #: a (filesystempath, lineno, domaininfo) tuple indicating the
+ #: actual location of a test item - it might be different from the
+ #: collected one e.g. if a method is inherited from a different module.
+ self.location = location
+
+ #: a name -> value dictionary containing all keywords and
+ #: markers associated with a test invocation.
+ self.keywords = keywords
+
+ #: test outcome, always one of "passed", "failed", "skipped".
+ self.outcome = outcome
+
+ #: None or a failure representation.
+ self.longrepr = longrepr
+
+ #: one of 'setup', 'call', 'teardown' to indicate runtest phase.
+ self.when = when
+
+ #: list of pairs ``(str, str)`` of extra information which needs to
+ #: be marshallable. Used by pytest to add captured text
+ #: from ``stdout`` and ``stderr``, but may be used by other plugins
+ #: to add arbitrary information to reports.
+ self.sections = list(sections)
+
+ #: time it took to run just the test
+ self.duration = duration
+
+ self.__dict__.update(extra)
+
+ def __repr__(self):
+ return "<TestReport %r when=%r outcome=%r>" % (
+ self.nodeid, self.when, self.outcome)
+
+class TeardownErrorReport(BaseReport):
+ outcome = "failed"
+ when = "teardown"
+ def __init__(self, longrepr, **extra):
+ self.longrepr = longrepr
+ self.sections = []
+ self.__dict__.update(extra)
+
+def pytest_make_collect_report(collector):
+ call = CallInfo(collector._memocollect, "memocollect")
+ longrepr = None
+ if not call.excinfo:
+ outcome = "passed"
+ else:
+ from _pytest import nose
+ skip_exceptions = (Skipped,) + nose.get_skip_exceptions()
+ if call.excinfo.errisinstance(skip_exceptions):
+ outcome = "skipped"
+ r = collector._repr_failure_py(call.excinfo, "line").reprcrash
+ longrepr = (str(r.path), r.lineno, r.message)
+ else:
+ outcome = "failed"
+ errorinfo = collector.repr_failure(call.excinfo)
+ if not hasattr(errorinfo, "toterminal"):
+ errorinfo = CollectErrorRepr(errorinfo)
+ longrepr = errorinfo
+ rep = CollectReport(collector.nodeid, outcome, longrepr,
+ getattr(call, 'result', None))
+ rep.call = call # see collect_one_node
+ return rep
+
+
+class CollectReport(BaseReport):
+ def __init__(self, nodeid, outcome, longrepr, result,
+ sections=(), **extra):
+ self.nodeid = nodeid
+ self.outcome = outcome
+ self.longrepr = longrepr
+ self.result = result or []
+ self.sections = list(sections)
+ self.__dict__.update(extra)
+
+ @property
+ def location(self):
+ return (self.fspath, None, self.fspath)
+
+ def __repr__(self):
+ return "<CollectReport %r lenresult=%s outcome=%r>" % (
+ self.nodeid, len(self.result), self.outcome)
+
+class CollectErrorRepr(TerminalRepr):
+ def __init__(self, msg):
+ self.longrepr = msg
+ def toterminal(self, out):
+ out.line(self.longrepr, red=True)
+
+class SetupState(object):
+ """ shared state for setting up/tearing down test items or collectors. """
+ def __init__(self):
+ self.stack = []
+ self._finalizers = {}
+
+ def addfinalizer(self, finalizer, colitem):
+ """ attach a finalizer to the given colitem.
+ if colitem is None, this will add a finalizer that
+ is called at the end of teardown_all().
+ """
+ assert colitem and not isinstance(colitem, tuple)
+ assert py.builtin.callable(finalizer)
+ #assert colitem in self.stack # some unit tests don't setup stack :/
+ self._finalizers.setdefault(colitem, []).append(finalizer)
+
+ def _pop_and_teardown(self):
+ colitem = self.stack.pop()
+ self._teardown_with_finalization(colitem)
+
+ def _callfinalizers(self, colitem):
+ finalizers = self._finalizers.pop(colitem, None)
+ exc = None
+ while finalizers:
+ fin = finalizers.pop()
+ try:
+ fin()
+ except Exception:
+ # XXX Only first exception will be seen by user,
+ # ideally all should be reported.
+ if exc is None:
+ exc = sys.exc_info()
+ if exc:
+ py.builtin._reraise(*exc)
+
+ def _teardown_with_finalization(self, colitem):
+ self._callfinalizers(colitem)
+ if hasattr(colitem, "teardown"):
+ colitem.teardown()
+ for colitem in self._finalizers:
+ assert colitem is None or colitem in self.stack \
+ or isinstance(colitem, tuple)
+
+ def teardown_all(self):
+ while self.stack:
+ self._pop_and_teardown()
+ for key in list(self._finalizers):
+ self._teardown_with_finalization(key)
+ assert not self._finalizers
+
+ def teardown_exact(self, item, nextitem):
+ needed_collectors = nextitem and nextitem.listchain() or []
+ self._teardown_towards(needed_collectors)
+
+ def _teardown_towards(self, needed_collectors):
+ while self.stack:
+ if self.stack == needed_collectors[:len(self.stack)]:
+ break
+ self._pop_and_teardown()
+
+ def prepare(self, colitem):
+ """ setup objects along the collector chain to the test-method
+ and teardown previously setup objects."""
+ needed_collectors = colitem.listchain()
+ self._teardown_towards(needed_collectors)
+
+ # check if the last collection node has raised an error
+ for col in self.stack:
+ if hasattr(col, '_prepare_exc'):
+ py.builtin._reraise(*col._prepare_exc)
+ for col in needed_collectors[len(self.stack):]:
+ self.stack.append(col)
+ try:
+ col.setup()
+ except Exception:
+ col._prepare_exc = sys.exc_info()
+ raise
+
+def collect_one_node(collector):
+ ihook = collector.ihook
+ ihook.pytest_collectstart(collector=collector)
+ rep = ihook.pytest_make_collect_report(collector=collector)
+ call = rep.__dict__.pop("call", None)
+ if call and check_interactive_exception(call, rep):
+ ihook.pytest_exception_interact(node=collector, call=call, report=rep)
+ return rep
+
+
+# =============================================================
+# Test OutcomeExceptions and helpers for creating them.
+
+
+class OutcomeException(Exception):
+ """ OutcomeException and its subclass instances indicate and
+ contain info about test and collection outcomes.
+ """
+ def __init__(self, msg=None, pytrace=True):
+ Exception.__init__(self, msg)
+ self.msg = msg
+ self.pytrace = pytrace
+
+ def __repr__(self):
+ if self.msg:
+ val = self.msg
+ if isinstance(val, bytes):
+ val = py._builtin._totext(val, errors='replace')
+ return val
+ return "<%s instance>" %(self.__class__.__name__,)
+ __str__ = __repr__
+
+class Skipped(OutcomeException):
+ # XXX hackish: on Python 3 we pretend to live in builtins
+ # so that Skipped exceptions print shorter/nicer
+ __module__ = 'builtins'
+
+ def __init__(self, msg=None, pytrace=True, allow_module_level=False):
+ OutcomeException.__init__(self, msg=msg, pytrace=pytrace)
+ self.allow_module_level = allow_module_level
+
+
+class Failed(OutcomeException):
+ """ raised from an explicit call to pytest.fail() """
+ __module__ = 'builtins'
+
+
+class Exit(KeyboardInterrupt):
+ """ raised for immediate program exits (no tracebacks/summaries)"""
+ def __init__(self, msg="unknown reason"):
+ self.msg = msg
+ KeyboardInterrupt.__init__(self, msg)
+
+# exposed helper methods
+
+def exit(msg):
+ """ exit testing process as if KeyboardInterrupt was triggered. """
+ __tracebackhide__ = True
+ raise Exit(msg)
+
+
+exit.Exception = Exit
+
+
+def skip(msg=""):
+ """ skip an executing test with the given message. Note: it's usually
+ better to use the pytest.mark.skipif marker to declare a test to be
+ skipped under certain conditions like mismatching platforms or
+ dependencies. See the pytest_skipping plugin for details.
+ """
+ __tracebackhide__ = True
+ raise Skipped(msg=msg)
+
+
+skip.Exception = Skipped
+
+
+def fail(msg="", pytrace=True):
+ """ explicitly fail an currently-executing test with the given Message.
+
+ :arg pytrace: if false the msg represents the full failure information
+ and no python traceback will be reported.
+ """
+ __tracebackhide__ = True
+ raise Failed(msg=msg, pytrace=pytrace)
+
+
+fail.Exception = Failed
+
+
+def importorskip(modname, minversion=None):
+ """ return imported module if it has at least "minversion" as its
+ __version__ attribute. If no minversion is specified the a skip
+ is only triggered if the module can not be imported.
+ """
+ __tracebackhide__ = True
+ compile(modname, '', 'eval') # to catch syntaxerrors
+ should_skip = False
+ try:
+ __import__(modname)
+ except ImportError:
+ # Do not raise chained exception here(#1485)
+ should_skip = True
+ if should_skip:
+ raise Skipped("could not import %r" %(modname,), allow_module_level=True)
+ mod = sys.modules[modname]
+ if minversion is None:
+ return mod
+ verattr = getattr(mod, '__version__', None)
+ if minversion is not None:
+ try:
+ from pkg_resources import parse_version as pv
+ except ImportError:
+ raise Skipped("we have a required version for %r but can not import "
+ "pkg_resources to parse version strings." % (modname,),
+ allow_module_level=True)
+ if verattr is None or pv(verattr) < pv(minversion):
+ raise Skipped("module %r has __version__ %r, required is: %r" %(
+ modname, verattr, minversion), allow_module_level=True)
+ return mod
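+
+ # Illustrative usage sketch (not part of upstream pytest): skip a whole
+ # test module unless an optional dependency is importable and new enough.
+ #
+ #     numpy = pytest.importorskip("numpy", minversion="1.10")
+ #
+ #     def test_uses_numpy():
+ #         assert numpy.zeros(3).sum() == 0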
+
diff --git a/lib/spack/external/_pytest/setuponly.py b/lib/spack/external/_pytest/setuponly.py
new file mode 100644
index 0000000000..1752c575f5
--- /dev/null
+++ b/lib/spack/external/_pytest/setuponly.py
@@ -0,0 +1,72 @@
+import pytest
+import sys
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption('--setuponly', '--setup-only', action="store_true",
+ help="only setup fixtures, do not execute tests.")
+ group.addoption('--setupshow', '--setup-show', action="store_true",
+ help="show setup of fixtures while executing tests.")
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_fixture_setup(fixturedef, request):
+ yield
+ config = request.config
+ if config.option.setupshow:
+ if hasattr(request, 'param'):
+ # Save the fixture parameter so ._show_fixture_action() can
+ # display it now and during the teardown (in .finish()).
+ if fixturedef.ids:
+ if callable(fixturedef.ids):
+ fixturedef.cached_param = fixturedef.ids(request.param)
+ else:
+ fixturedef.cached_param = fixturedef.ids[
+ request.param_index]
+ else:
+ fixturedef.cached_param = request.param
+ _show_fixture_action(fixturedef, 'SETUP')
+
+
+def pytest_fixture_post_finalizer(fixturedef):
+ if hasattr(fixturedef, "cached_result"):
+ config = fixturedef._fixturemanager.config
+ if config.option.setupshow:
+ _show_fixture_action(fixturedef, 'TEARDOWN')
+ if hasattr(fixturedef, "cached_param"):
+ del fixturedef.cached_param
+
+
+def _show_fixture_action(fixturedef, msg):
+ config = fixturedef._fixturemanager.config
+ capman = config.pluginmanager.getplugin('capturemanager')
+ if capman:
+ out, err = capman.suspendcapture()
+
+ tw = config.get_terminal_writer()
+ tw.line()
+ tw.write(' ' * 2 * fixturedef.scopenum)
+ tw.write('{step} {scope} {fixture}'.format(
+ step=msg.ljust(8), # align the output to TEARDOWN
+ scope=fixturedef.scope[0].upper(),
+ fixture=fixturedef.argname))
+
+ if msg == 'SETUP':
+ deps = sorted(arg for arg in fixturedef.argnames if arg != 'request')
+ if deps:
+ tw.write(' (fixtures used: {0})'.format(', '.join(deps)))
+
+ if hasattr(fixturedef, 'cached_param'):
+ tw.write('[{0}]'.format(fixturedef.cached_param))
+
+ if capman:
+ capman.resumecapture()
+ sys.stdout.write(out)
+ sys.stderr.write(err)
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_cmdline_main(config):
+ if config.option.setuponly:
+ config.option.setupshow = True
diff --git a/lib/spack/external/_pytest/setupplan.py b/lib/spack/external/_pytest/setupplan.py
new file mode 100644
index 0000000000..f0853dee54
--- /dev/null
+++ b/lib/spack/external/_pytest/setupplan.py
@@ -0,0 +1,23 @@
+import pytest
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("debugconfig")
+ group.addoption('--setupplan', '--setup-plan', action="store_true",
+ help="show what fixtures and tests would be executed but "
+ "don't execute anything.")
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_fixture_setup(fixturedef, request):
+ # Will return a dummy fixture if the setuponly option is provided.
+ if request.config.option.setupplan:
+ fixturedef.cached_result = (None, None, None)
+ return fixturedef.cached_result
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_cmdline_main(config):
+ if config.option.setupplan:
+ config.option.setuponly = True
+ config.option.setupshow = True
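+
+ # Illustrative usage sketch (not part of upstream pytest):
+ # ``pytest --setup-plan`` shows which fixtures would be set up and torn
+ # down for each test without actually executing anything, while
+ # ``pytest --setup-show`` runs the tests and reports fixture SETUP /
+ # TEARDOWN steps inline.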
diff --git a/lib/spack/external/_pytest/skipping.py b/lib/spack/external/_pytest/skipping.py
new file mode 100644
index 0000000000..a8eaea98aa
--- /dev/null
+++ b/lib/spack/external/_pytest/skipping.py
@@ -0,0 +1,375 @@
+""" support for skip/xfail functions and markers. """
+import os
+import sys
+import traceback
+
+import py
+import pytest
+from _pytest.mark import MarkInfo, MarkDecorator
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("general")
+ group.addoption('--runxfail',
+ action="store_true", dest="runxfail", default=False,
+ help="run tests even if they are marked xfail")
+
+ parser.addini("xfail_strict", "default for the strict parameter of xfail "
+ "markers when not given explicitly (default: "
+ "False)",
+ default=False,
+ type="bool")
+
+
+def pytest_configure(config):
+ if config.option.runxfail:
+ old = pytest.xfail
+ config._cleanup.append(lambda: setattr(pytest, "xfail", old))
+
+ def nop(*args, **kwargs):
+ pass
+
+ nop.Exception = XFailed
+ setattr(pytest, "xfail", nop)
+
+ config.addinivalue_line("markers",
+ "skip(reason=None): skip the given test function with an optional reason. "
+ "Example: skip(reason=\"no way of currently testing this\") skips the "
+ "test."
+ )
+ config.addinivalue_line("markers",
+ "skipif(condition): skip the given test function if eval(condition) "
+ "results in a True value. Evaluation happens within the "
+ "module global context. Example: skipif('sys.platform == \"win32\"') "
+ "skips the test if we are on the win32 platform. see "
+ "http://pytest.org/latest/skipping.html"
+ )
+ config.addinivalue_line("markers",
+ "xfail(condition, reason=None, run=True, raises=None, strict=False): "
+ "mark the the test function as an expected failure if eval(condition) "
+ "has a True value. Optionally specify a reason for better reporting "
+ "and run=False if you don't even want to execute the test function. "
+ "If only specific exception(s) are expected, you can list them in "
+ "raises, and if the test fails in other ways, it will be reported as "
+ "a true failure. See http://pytest.org/latest/skipping.html"
+ )
+
+
+def pytest_namespace():
+ return dict(xfail=xfail)
+
+
+class XFailed(pytest.fail.Exception):
+ """ raised from an explicit call to pytest.xfail() """
+
+
+def xfail(reason=""):
+ """ xfail an executing test or setup functions with the given reason."""
+ __tracebackhide__ = True
+ raise XFailed(reason)
+
+
+xfail.Exception = XFailed
+
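+ # Illustrative usage sketch (not part of upstream pytest): imperative
+ # xfail from inside a test, alongside the declarative marker form
+ # (``backend`` is a hypothetical fixture).
+ #
+ #     def test_feature_on_old_backend(backend):
+ #         if backend.version < (2, 0):
+ #             pytest.xfail("feature requires backend >= 2.0")
+ #         assert backend.new_feature() == 42
+ #
+ #     @pytest.mark.xfail(sys.platform == "win32", reason="not ported yet")
+ #     def test_posix_only():
+ #         ...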
+
+class MarkEvaluator:
+ def __init__(self, item, name):
+ self.item = item
+ self.name = name
+
+ @property
+ def holder(self):
+ return self.item.keywords.get(self.name)
+
+ def __bool__(self):
+ return bool(self.holder)
+ __nonzero__ = __bool__
+
+ def wasvalid(self):
+ return not hasattr(self, 'exc')
+
+ def invalidraise(self, exc):
+ raises = self.get('raises')
+ if not raises:
+ return
+ return not isinstance(exc, raises)
+
+ def istrue(self):
+ try:
+ return self._istrue()
+ except Exception:
+ self.exc = sys.exc_info()
+ if isinstance(self.exc[1], SyntaxError):
+ msg = [" " * (self.exc[1].offset + 4) + "^",]
+ msg.append("SyntaxError: invalid syntax")
+ else:
+ msg = traceback.format_exception_only(*self.exc[:2])
+ pytest.fail("Error evaluating %r expression\n"
+ " %s\n"
+ "%s"
+ %(self.name, self.expr, "\n".join(msg)),
+ pytrace=False)
+
+ def _getglobals(self):
+ d = {'os': os, 'sys': sys, 'config': self.item.config}
+ d.update(self.item.obj.__globals__)
+ return d
+
+ def _istrue(self):
+ if hasattr(self, 'result'):
+ return self.result
+ if self.holder:
+ d = self._getglobals()
+ if self.holder.args or 'condition' in self.holder.kwargs:
+ self.result = False
+ # "holder" might be a MarkInfo or a MarkDecorator; only
+ # MarkInfo keeps track of all parameters it received in an
+ # _arglist attribute
+ if hasattr(self.holder, '_arglist'):
+ arglist = self.holder._arglist
+ else:
+ arglist = [(self.holder.args, self.holder.kwargs)]
+ for args, kwargs in arglist:
+ if 'condition' in kwargs:
+ args = (kwargs['condition'],)
+ for expr in args:
+ self.expr = expr
+ if isinstance(expr, py.builtin._basestring):
+ result = cached_eval(self.item.config, expr, d)
+ else:
+ if "reason" not in kwargs:
+ # XXX better be checked at collection time
+ msg = "you need to specify reason=STRING " \
+ "when using booleans as conditions."
+ pytest.fail(msg)
+ result = bool(expr)
+ if result:
+ self.result = True
+ self.reason = kwargs.get('reason', None)
+ self.expr = expr
+ return self.result
+ else:
+ self.result = True
+ return getattr(self, 'result', False)
+
+ def get(self, attr, default=None):
+ return self.holder.kwargs.get(attr, default)
+
+ def getexplanation(self):
+ expl = getattr(self, 'reason', None) or self.get('reason', None)
+ if not expl:
+ if not hasattr(self, 'expr'):
+ return ""
+ else:
+ return "condition: " + str(self.expr)
+ return expl
+
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_runtest_setup(item):
+ # Check if skip or skipif are specified as pytest marks
+
+ skipif_info = item.keywords.get('skipif')
+ if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
+ eval_skipif = MarkEvaluator(item, 'skipif')
+ if eval_skipif.istrue():
+ item._evalskip = eval_skipif
+ pytest.skip(eval_skipif.getexplanation())
+
+ skip_info = item.keywords.get('skip')
+ if isinstance(skip_info, (MarkInfo, MarkDecorator)):
+ item._evalskip = True
+ if 'reason' in skip_info.kwargs:
+ pytest.skip(skip_info.kwargs['reason'])
+ elif skip_info.args:
+ pytest.skip(skip_info.args[0])
+ else:
+ pytest.skip("unconditional skip")
+
+ item._evalxfail = MarkEvaluator(item, 'xfail')
+ check_xfail_no_run(item)
+
+
+@pytest.mark.hookwrapper
+def pytest_pyfunc_call(pyfuncitem):
+ check_xfail_no_run(pyfuncitem)
+ outcome = yield
+ passed = outcome.excinfo is None
+ if passed:
+ check_strict_xfail(pyfuncitem)
+
+
+def check_xfail_no_run(item):
+ """check xfail(run=False)"""
+ if not item.config.option.runxfail:
+ evalxfail = item._evalxfail
+ if evalxfail.istrue():
+ if not evalxfail.get('run', True):
+ pytest.xfail("[NOTRUN] " + evalxfail.getexplanation())
+
+
+def check_strict_xfail(pyfuncitem):
+ """check xfail(strict=True) for the given PASSING test"""
+ evalxfail = pyfuncitem._evalxfail
+ if evalxfail.istrue():
+ strict_default = pyfuncitem.config.getini('xfail_strict')
+ is_strict_xfail = evalxfail.get('strict', strict_default)
+ if is_strict_xfail:
+ del pyfuncitem._evalxfail
+ explanation = evalxfail.getexplanation()
+ pytest.fail('[XPASS(strict)] ' + explanation, pytrace=False)
+
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_makereport(item, call):
+ outcome = yield
+ rep = outcome.get_result()
+ evalxfail = getattr(item, '_evalxfail', None)
+ evalskip = getattr(item, '_evalskip', None)
+ # unittest special case, see setting of _unexpectedsuccess
+ if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
+ from _pytest.compat import _is_unittest_unexpected_success_a_failure
+ if item._unexpectedsuccess:
+ rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess)
+ else:
+ rep.longrepr = "Unexpected success"
+ if _is_unittest_unexpected_success_a_failure():
+ rep.outcome = "failed"
+ else:
+ rep.outcome = "passed"
+ rep.wasxfail = rep.longrepr
+ elif item.config.option.runxfail:
+ pass # don't interfere
+ elif call.excinfo and call.excinfo.errisinstance(pytest.xfail.Exception):
+ rep.wasxfail = "reason: " + call.excinfo.value.msg
+ rep.outcome = "skipped"
+ elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \
+ evalxfail.istrue():
+ if call.excinfo:
+ if evalxfail.invalidraise(call.excinfo.value):
+ rep.outcome = "failed"
+ else:
+ rep.outcome = "skipped"
+ rep.wasxfail = evalxfail.getexplanation()
+ elif call.when == "call":
+ strict_default = item.config.getini('xfail_strict')
+ is_strict_xfail = evalxfail.get('strict', strict_default)
+ explanation = evalxfail.getexplanation()
+ if is_strict_xfail:
+ rep.outcome = "failed"
+ rep.longrepr = "[XPASS(strict)] {0}".format(explanation)
+ else:
+ rep.outcome = "passed"
+ rep.wasxfail = explanation
+ elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple:
+ # skipped by mark.skipif; change the location of the failure
+ # to point to the item definition, otherwise it will display
+ # the location of where the skip exception was raised within pytest
+ filename, line, reason = rep.longrepr
+ filename, line = item.location[:2]
+ rep.longrepr = filename, line, reason
+
+# called by terminalreporter progress reporting
+def pytest_report_teststatus(report):
+ if hasattr(report, "wasxfail"):
+ if report.skipped:
+ return "xfailed", "x", "xfail"
+ elif report.passed:
+ return "xpassed", "X", ("XPASS", {'yellow': True})
+
+# called by the terminalreporter instance/plugin
+def pytest_terminal_summary(terminalreporter):
+ tr = terminalreporter
+ if not tr.reportchars:
+ #for name in "xfailed skipped failed xpassed":
+ # if not tr.stats.get(name, 0):
+ # tr.write_line("HINT: use '-r' option to see extra "
+ # "summary info about tests")
+ # break
+ return
+
+ lines = []
+ for char in tr.reportchars:
+ if char == "x":
+ show_xfailed(terminalreporter, lines)
+ elif char == "X":
+ show_xpassed(terminalreporter, lines)
+ elif char in "fF":
+ show_simple(terminalreporter, lines, 'failed', "FAIL %s")
+ elif char in "sS":
+ show_skipped(terminalreporter, lines)
+ elif char == "E":
+ show_simple(terminalreporter, lines, 'error', "ERROR %s")
+ elif char == 'p':
+ show_simple(terminalreporter, lines, 'passed', "PASSED %s")
+
+ if lines:
+ tr._tw.sep("=", "short test summary info")
+ for line in lines:
+ tr._tw.line(line)
+
+def show_simple(terminalreporter, lines, stat, format):
+ failed = terminalreporter.stats.get(stat)
+ if failed:
+ for rep in failed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ lines.append(format %(pos,))
+
+def show_xfailed(terminalreporter, lines):
+ xfailed = terminalreporter.stats.get("xfailed")
+ if xfailed:
+ for rep in xfailed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ reason = rep.wasxfail
+ lines.append("XFAIL %s" % (pos,))
+ if reason:
+ lines.append(" " + str(reason))
+
+def show_xpassed(terminalreporter, lines):
+ xpassed = terminalreporter.stats.get("xpassed")
+ if xpassed:
+ for rep in xpassed:
+ pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
+ reason = rep.wasxfail
+ lines.append("XPASS %s %s" %(pos, reason))
+
+def cached_eval(config, expr, d):
+ if not hasattr(config, '_evalcache'):
+ config._evalcache = {}
+ try:
+ return config._evalcache[expr]
+ except KeyError:
+ import _pytest._code
+ exprcode = _pytest._code.compile(expr, mode="eval")
+ config._evalcache[expr] = x = eval(exprcode, d)
+ return x
+
+
+def folded_skips(skipped):
+ d = {}
+ for event in skipped:
+ key = event.longrepr
+ assert len(key) == 3, (event, key)
+ d.setdefault(key, []).append(event)
+ l = []
+ for key, events in d.items():
+ l.append((len(events),) + key)
+ return l
+
+def show_skipped(terminalreporter, lines):
+ tr = terminalreporter
+ skipped = tr.stats.get('skipped', [])
+ if skipped:
+ #if not tr.hasopt('skipped'):
+ # tr.write_line(
+ # "%d skipped tests, specify -rs for more info" %
+ # len(skipped))
+ # return
+ fskips = folded_skips(skipped)
+ if fskips:
+ #tr.write_sep("_", "skipped test summary")
+ for num, fspath, lineno, reason in fskips:
+ if reason.startswith("Skipped: "):
+ reason = reason[9:]
+ lines.append("SKIP [%d] %s:%d: %s" %
+ (num, fspath, lineno, reason))
diff --git a/lib/spack/external/_pytest/terminal.py b/lib/spack/external/_pytest/terminal.py
new file mode 100644
index 0000000000..16bf757338
--- /dev/null
+++ b/lib/spack/external/_pytest/terminal.py
@@ -0,0 +1,593 @@
+""" terminal reporting of the full testing process.
+
+This is a good source for looking at the various reporting hooks.
+"""
+from _pytest.main import EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, \
+ EXIT_USAGEERROR, EXIT_NOTESTSCOLLECTED
+import pytest
+import py
+import sys
+import time
+import platform
+
+import _pytest._pluggy as pluggy
+
+
+def pytest_addoption(parser):
+ group = parser.getgroup("terminal reporting", "reporting", after="general")
+ group._addoption('-v', '--verbose', action="count",
+ dest="verbose", default=0, help="increase verbosity."),
+ group._addoption('-q', '--quiet', action="count",
+ dest="quiet", default=0, help="decrease verbosity."),
+ group._addoption('-r',
+ action="store", dest="reportchars", default='', metavar="chars",
+ help="show extra test summary info as specified by chars (f)ailed, "
+ "(E)error, (s)skipped, (x)failed, (X)passed, "
+ "(p)passed, (P)passed with output, (a)all except pP. "
+ "The pytest warnings are displayed at all times except when "
+ "--disable-pytest-warnings is set")
+ group._addoption('--disable-pytest-warnings', default=False,
+ dest='disablepytestwarnings', action='store_true',
+ help='disable warnings summary, overrides -r w flag')
+ group._addoption('-l', '--showlocals',
+ action="store_true", dest="showlocals", default=False,
+ help="show locals in tracebacks (disabled by default).")
+ group._addoption('--tb', metavar="style",
+ action="store", dest="tbstyle", default='auto',
+ choices=['auto', 'long', 'short', 'no', 'line', 'native'],
+ help="traceback print mode (auto/long/short/line/native/no).")
+ group._addoption('--fulltrace', '--full-trace',
+ action="store_true", default=False,
+ help="don't cut any tracebacks (default is to cut).")
+ group._addoption('--color', metavar="color",
+ action="store", dest="color", default='auto',
+ choices=['yes', 'no', 'auto'],
+ help="color terminal output (yes/no/auto).")
+
+def pytest_configure(config):
+ config.option.verbose -= config.option.quiet
+ reporter = TerminalReporter(config, sys.stdout)
+ config.pluginmanager.register(reporter, 'terminalreporter')
+ if config.option.debug or config.option.traceconfig:
+ def mywriter(tags, args):
+ msg = " ".join(map(str, args))
+ reporter.write_line("[traceconfig] " + msg)
+ config.trace.root.setprocessor("pytest:config", mywriter)
+
+def getreportopt(config):
+ reportopts = ""
+ reportchars = config.option.reportchars
+ if not config.option.disablepytestwarnings and 'w' not in reportchars:
+ reportchars += 'w'
+ elif config.option.disablepytestwarnings and 'w' in reportchars:
+ reportchars = reportchars.replace('w', '')
+ if reportchars:
+ for char in reportchars:
+ if char not in reportopts and char != 'a':
+ reportopts += char
+ elif char == 'a':
+ reportopts = 'fEsxXw'
+ return reportopts
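+
+# For example (illustrative): with "-ra" on the command line and the warnings
+# summary enabled, reportchars becomes "aw"; the loop above expands 'a' to
+# "fEsxXw", so getreportopt() returns "fEsxXw".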
+
+def pytest_report_teststatus(report):
+ if report.passed:
+ letter = "."
+ elif report.skipped:
+ letter = "s"
+ elif report.failed:
+ letter = "F"
+ if report.when != "call":
+ letter = "f"
+ return report.outcome, letter, report.outcome.upper()
+
+class WarningReport:
+ def __init__(self, code, message, nodeid=None, fslocation=None):
+ self.code = code
+ self.message = message
+ self.nodeid = nodeid
+ self.fslocation = fslocation
+
+
+class TerminalReporter:
+ def __init__(self, config, file=None):
+ import _pytest.config
+ self.config = config
+ self.verbosity = self.config.option.verbose
+ self.showheader = self.verbosity >= 0
+ self.showfspath = self.verbosity >= 0
+ self.showlongtestinfo = self.verbosity > 0
+ self._numcollected = 0
+
+ self.stats = {}
+ self.startdir = py.path.local()
+ if file is None:
+ file = sys.stdout
+ self._tw = self.writer = _pytest.config.create_terminal_writer(config,
+ file)
+ self.currentfspath = None
+ self.reportchars = getreportopt(config)
+ self.hasmarkup = self._tw.hasmarkup
+ self.isatty = file.isatty()
+
+ def hasopt(self, char):
+ char = {'xfailed': 'x', 'skipped': 's'}.get(char, char)
+ return char in self.reportchars
+
+ def write_fspath_result(self, nodeid, res):
+ fspath = self.config.rootdir.join(nodeid.split("::")[0])
+ if fspath != self.currentfspath:
+ self.currentfspath = fspath
+ fspath = self.startdir.bestrelpath(fspath)
+ self._tw.line()
+ self._tw.write(fspath + " ")
+ self._tw.write(res)
+
+ def write_ensure_prefix(self, prefix, extra="", **kwargs):
+ if self.currentfspath != prefix:
+ self._tw.line()
+ self.currentfspath = prefix
+ self._tw.write(prefix)
+ if extra:
+ self._tw.write(extra, **kwargs)
+ self.currentfspath = -2
+
+ def ensure_newline(self):
+ if self.currentfspath:
+ self._tw.line()
+ self.currentfspath = None
+
+ def write(self, content, **markup):
+ self._tw.write(content, **markup)
+
+ def write_line(self, line, **markup):
+ if not py.builtin._istext(line):
+ line = py.builtin.text(line, errors="replace")
+ self.ensure_newline()
+ self._tw.line(line, **markup)
+
+ def rewrite(self, line, **markup):
+ line = str(line)
+ self._tw.write("\r" + line, **markup)
+
+ def write_sep(self, sep, title=None, **markup):
+ self.ensure_newline()
+ self._tw.sep(sep, title, **markup)
+
+ def section(self, title, sep="=", **kw):
+ self._tw.sep(sep, title, **kw)
+
+ def line(self, msg, **kw):
+ self._tw.line(msg, **kw)
+
+ def pytest_internalerror(self, excrepr):
+ for line in py.builtin.text(excrepr).split("\n"):
+ self.write_line("INTERNALERROR> " + line)
+ return 1
+
+ def pytest_logwarning(self, code, fslocation, message, nodeid):
+ warnings = self.stats.setdefault("warnings", [])
+ if isinstance(fslocation, tuple):
+ fslocation = "%s:%d" % fslocation
+ warning = WarningReport(code=code, fslocation=fslocation,
+ message=message, nodeid=nodeid)
+ warnings.append(warning)
+
+ def pytest_plugin_registered(self, plugin):
+ if self.config.option.traceconfig:
+ msg = "PLUGIN registered: %s" % (plugin,)
+ # XXX this event may happen during setup/teardown time
+ # which unfortunately captures our output here
+ # which garbles our output if we use self.write_line
+ self.write_line(msg)
+
+ def pytest_deselected(self, items):
+ self.stats.setdefault('deselected', []).extend(items)
+
+ def pytest_runtest_logstart(self, nodeid, location):
+ # ensure that the path is printed before the
+ # 1st test of a module starts running
+ if self.showlongtestinfo:
+ line = self._locationline(nodeid, *location)
+ self.write_ensure_prefix(line, "")
+ elif self.showfspath:
+ fsid = nodeid.split("::")[0]
+ self.write_fspath_result(fsid, "")
+
+ def pytest_runtest_logreport(self, report):
+ rep = report
+ res = self.config.hook.pytest_report_teststatus(report=rep)
+ cat, letter, word = res
+ self.stats.setdefault(cat, []).append(rep)
+ self._tests_ran = True
+ if not letter and not word:
+ # probably passed setup/teardown
+ return
+ if self.verbosity <= 0:
+ if not hasattr(rep, 'node') and self.showfspath:
+ self.write_fspath_result(rep.nodeid, letter)
+ else:
+ self._tw.write(letter)
+ else:
+ if isinstance(word, tuple):
+ word, markup = word
+ else:
+ if rep.passed:
+ markup = {'green':True}
+ elif rep.failed:
+ markup = {'red':True}
+ elif rep.skipped:
+ markup = {'yellow':True}
+ line = self._locationline(rep.nodeid, *rep.location)
+ if not hasattr(rep, 'node'):
+ self.write_ensure_prefix(line, word, **markup)
+ #self._tw.write(word, **markup)
+ else:
+ self.ensure_newline()
+ if hasattr(rep, 'node'):
+ self._tw.write("[%s] " % rep.node.gateway.id)
+ self._tw.write(word, **markup)
+ self._tw.write(" " + line)
+ self.currentfspath = -2
+
+ def pytest_collection(self):
+ if not self.isatty and self.config.option.verbose >= 1:
+ self.write("collecting ... ", bold=True)
+
+ def pytest_collectreport(self, report):
+ if report.failed:
+ self.stats.setdefault("error", []).append(report)
+ elif report.skipped:
+ self.stats.setdefault("skipped", []).append(report)
+ items = [x for x in report.result if isinstance(x, pytest.Item)]
+ self._numcollected += len(items)
+ if self.isatty:
+ #self.write_fspath_result(report.nodeid, 'E')
+ self.report_collect()
+
+ def report_collect(self, final=False):
+ if self.config.option.verbose < 0:
+ return
+
+ errors = len(self.stats.get('error', []))
+ skipped = len(self.stats.get('skipped', []))
+ if final:
+ line = "collected "
+ else:
+ line = "collecting "
+ line += str(self._numcollected) + " items"
+ if errors:
+ line += " / %d errors" % errors
+ if skipped:
+ line += " / %d skipped" % skipped
+ if self.isatty:
+ if final:
+ line += " \n"
+ self.rewrite(line, bold=True)
+ else:
+ self.write_line(line)
+
+ def pytest_collection_modifyitems(self):
+ self.report_collect(True)
+
+ @pytest.hookimpl(trylast=True)
+ def pytest_sessionstart(self, session):
+ self._sessionstarttime = time.time()
+ if not self.showheader:
+ return
+ self.write_sep("=", "test session starts", bold=True)
+ verinfo = platform.python_version()
+ msg = "platform %s -- Python %s" % (sys.platform, verinfo)
+ if hasattr(sys, 'pypy_version_info'):
+ verinfo = ".".join(map(str, sys.pypy_version_info[:3]))
+ msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3])
+ msg += ", pytest-%s, py-%s, pluggy-%s" % (
+ pytest.__version__, py.__version__, pluggy.__version__)
+ if self.verbosity > 0 or self.config.option.debug or \
+ getattr(self.config.option, 'pastebin', None):
+ msg += " -- " + str(sys.executable)
+ self.write_line(msg)
+ lines = self.config.hook.pytest_report_header(
+ config=self.config, startdir=self.startdir)
+ lines.reverse()
+ for line in flatten(lines):
+ self.write_line(line)
+
+ def pytest_report_header(self, config):
+ inifile = ""
+ if config.inifile:
+ inifile = config.rootdir.bestrelpath(config.inifile)
+ lines = ["rootdir: %s, inifile: %s" %(config.rootdir, inifile)]
+
+ plugininfo = config.pluginmanager.list_plugin_distinfo()
+ if plugininfo:
+
+ lines.append(
+ "plugins: %s" % ", ".join(_plugin_nameversions(plugininfo)))
+ return lines
+
+ def pytest_collection_finish(self, session):
+ if self.config.option.collectonly:
+ self._printcollecteditems(session.items)
+ if self.stats.get('failed'):
+ self._tw.sep("!", "collection failures")
+ for rep in self.stats.get('failed'):
+ rep.toterminal(self._tw)
+ return 1
+ return 0
+ if not self.showheader:
+ return
+ #for i, testarg in enumerate(self.config.args):
+ # self.write_line("test path %d: %s" %(i+1, testarg))
+
+ def _printcollecteditems(self, items):
+ # to print out items and their parent collectors
+ # we take care to leave out Instances aka ()
+ # because later versions are going to get rid of them anyway
+ if self.config.option.verbose < 0:
+ if self.config.option.verbose < -1:
+ counts = {}
+ for item in items:
+ name = item.nodeid.split('::', 1)[0]
+ counts[name] = counts.get(name, 0) + 1
+ for name, count in sorted(counts.items()):
+ self._tw.line("%s: %d" % (name, count))
+ else:
+ for item in items:
+ nodeid = item.nodeid
+ nodeid = nodeid.replace("::()::", "::")
+ self._tw.line(nodeid)
+ return
+ stack = []
+ indent = ""
+ for item in items:
+ needed_collectors = item.listchain()[1:] # strip root node
+ while stack:
+ if stack == needed_collectors[:len(stack)]:
+ break
+ stack.pop()
+ for col in needed_collectors[len(stack):]:
+ stack.append(col)
+ #if col.name == "()":
+ # continue
+ indent = (len(stack) - 1) * " "
+ self._tw.line("%s%s" % (indent, col))
+
+ @pytest.hookimpl(hookwrapper=True)
+ def pytest_sessionfinish(self, exitstatus):
+ outcome = yield
+ outcome.get_result()
+ self._tw.line("")
+ summary_exit_codes = (
+ EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, EXIT_USAGEERROR,
+ EXIT_NOTESTSCOLLECTED)
+ if exitstatus in summary_exit_codes:
+ self.config.hook.pytest_terminal_summary(terminalreporter=self,
+ exitstatus=exitstatus)
+ self.summary_errors()
+ self.summary_failures()
+ self.summary_warnings()
+ self.summary_passes()
+ if exitstatus == EXIT_INTERRUPTED:
+ self._report_keyboardinterrupt()
+ del self._keyboardinterrupt_memo
+ self.summary_deselected()
+ self.summary_stats()
+
+ def pytest_keyboard_interrupt(self, excinfo):
+ self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True)
+
+ def pytest_unconfigure(self):
+ if hasattr(self, '_keyboardinterrupt_memo'):
+ self._report_keyboardinterrupt()
+
+ def _report_keyboardinterrupt(self):
+ excrepr = self._keyboardinterrupt_memo
+ msg = excrepr.reprcrash.message
+ self.write_sep("!", msg)
+ if "KeyboardInterrupt" in msg:
+ if self.config.option.fulltrace:
+ excrepr.toterminal(self._tw)
+ else:
+ self._tw.line("to show a full traceback on KeyboardInterrupt use --fulltrace", yellow=True)
+ excrepr.reprcrash.toterminal(self._tw)
+
+ def _locationline(self, nodeid, fspath, lineno, domain):
+ def mkrel(nodeid):
+ line = self.config.cwd_relative_nodeid(nodeid)
+ if domain and line.endswith(domain):
+ line = line[:-len(domain)]
+ l = domain.split("[")
+ l[0] = l[0].replace('.', '::') # don't replace '.' in params
+ line += "[".join(l)
+ return line
+ # collect_fspath comes from testid which has a "/"-normalized path
+
+ if fspath:
+ res = mkrel(nodeid).replace("::()", "") # parens-normalization
+ if nodeid.split("::")[0] != fspath.replace("\\", "/"):
+ res += " <- " + self.startdir.bestrelpath(fspath)
+ else:
+ res = "[location]"
+ return res + " "
+
+ def _getfailureheadline(self, rep):
+ if hasattr(rep, 'location'):
+ fspath, lineno, domain = rep.location
+ return domain
+ else:
+ return "test session" # XXX?
+
+ def _getcrashline(self, rep):
+ try:
+ return str(rep.longrepr.reprcrash)
+ except AttributeError:
+ try:
+ return str(rep.longrepr)[:50]
+ except AttributeError:
+ return ""
+
+ #
+ # summaries for sessionfinish
+ #
+ def getreports(self, name):
+ l = []
+ for x in self.stats.get(name, []):
+ if not hasattr(x, '_pdbshown'):
+ l.append(x)
+ return l
+
+ def summary_warnings(self):
+ if self.hasopt("w"):
+ warnings = self.stats.get("warnings")
+ if not warnings:
+ return
+ self.write_sep("=", "pytest-warning summary")
+ for w in warnings:
+ self._tw.line("W%s %s %s" % (w.code,
+ w.fslocation, w.message))
+
+ def summary_passes(self):
+ if self.config.option.tbstyle != "no":
+ if self.hasopt("P"):
+ reports = self.getreports('passed')
+ if not reports:
+ return
+ self.write_sep("=", "PASSES")
+ for rep in reports:
+ msg = self._getfailureheadline(rep)
+ self.write_sep("_", msg)
+ self._outrep_summary(rep)
+
+ def print_teardown_sections(self, rep):
+ for secname, content in rep.sections:
+ if 'teardown' in secname:
+ self._tw.sep('-', secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
+
+
+ def summary_failures(self):
+ if self.config.option.tbstyle != "no":
+ reports = self.getreports('failed')
+ if not reports:
+ return
+ self.write_sep("=", "FAILURES")
+ for rep in reports:
+ if self.config.option.tbstyle == "line":
+ line = self._getcrashline(rep)
+ self.write_line(line)
+ else:
+ msg = self._getfailureheadline(rep)
+ markup = {'red': True, 'bold': True}
+ self.write_sep("_", msg, **markup)
+ self._outrep_summary(rep)
+ for report in self.getreports(''):
+ if report.nodeid == rep.nodeid and report.when == 'teardown':
+ self.print_teardown_sections(report)
+
+ def summary_errors(self):
+ if self.config.option.tbstyle != "no":
+ reports = self.getreports('error')
+ if not reports:
+ return
+ self.write_sep("=", "ERRORS")
+ for rep in self.stats['error']:
+ msg = self._getfailureheadline(rep)
+ if not hasattr(rep, 'when'):
+ # collect
+ msg = "ERROR collecting " + msg
+ elif rep.when == "setup":
+ msg = "ERROR at setup of " + msg
+ elif rep.when == "teardown":
+ msg = "ERROR at teardown of " + msg
+ self.write_sep("_", msg)
+ self._outrep_summary(rep)
+
+ def _outrep_summary(self, rep):
+ rep.toterminal(self._tw)
+ for secname, content in rep.sections:
+ self._tw.sep("-", secname)
+ if content[-1:] == "\n":
+ content = content[:-1]
+ self._tw.line(content)
+
+ def summary_stats(self):
+ session_duration = time.time() - self._sessionstarttime
+ (line, color) = build_summary_stats_line(self.stats)
+ msg = "%s in %.2f seconds" % (line, session_duration)
+ markup = {color: True, 'bold': True}
+
+ if self.verbosity >= 0:
+ self.write_sep("=", msg, **markup)
+ if self.verbosity == -1:
+ self.write_line(msg, **markup)
+
+ def summary_deselected(self):
+ if 'deselected' in self.stats:
+ self.write_sep("=", "%d tests deselected" % (
+ len(self.stats['deselected'])), bold=True)
+
+def repr_pythonversion(v=None):
+ if v is None:
+ v = sys.version_info
+ try:
+ return "%s.%s.%s-%s-%s" % v
+ except (TypeError, ValueError):
+ return str(v)
+
+def flatten(l):
+ for x in l:
+ if isinstance(x, (list, tuple)):
+ for y in flatten(x):
+ yield y
+ else:
+ yield x
+
+def build_summary_stats_line(stats):
+ keys = ("failed passed skipped deselected "
+ "xfailed xpassed warnings error").split()
+ key_translation = {'warnings': 'pytest-warnings'}
+ unknown_key_seen = False
+ for key in stats.keys():
+ if key not in keys:
+ if key: # setup/teardown reports have an empty key, ignore them
+ keys.append(key)
+ unknown_key_seen = True
+ parts = []
+ for key in keys:
+ val = stats.get(key, None)
+ if val:
+ key_name = key_translation.get(key, key)
+ parts.append("%d %s" % (len(val), key_name))
+
+ if parts:
+ line = ", ".join(parts)
+ else:
+ line = "no tests ran"
+
+ if 'failed' in stats or 'error' in stats:
+ color = 'red'
+ elif 'warnings' in stats or unknown_key_seen:
+ color = 'yellow'
+ elif 'passed' in stats:
+ color = 'green'
+ else:
+ color = 'yellow'
+
+ return (line, color)
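+
+# For example (illustrative): stats with ten "passed" and two "failed" reports
+# yields ("2 failed, 10 passed", "red"); an empty stats dict yields
+# ("no tests ran", "yellow").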
+
+
+def _plugin_nameversions(plugininfo):
+ l = []
+ for plugin, dist in plugininfo:
+ # gets us name and version!
+ name = '{dist.project_name}-{dist.version}'.format(dist=dist)
+ # questionable convenience, but it keeps things short
+ if name.startswith("pytest-"):
+ name = name[7:]
+ # we decided to print python package names
+ # they can have more than one plugin
+ if name not in l:
+ l.append(name)
+ return l
diff --git a/lib/spack/external/_pytest/tmpdir.py b/lib/spack/external/_pytest/tmpdir.py
new file mode 100644
index 0000000000..28a6b06366
--- /dev/null
+++ b/lib/spack/external/_pytest/tmpdir.py
@@ -0,0 +1,124 @@
+""" support for providing temporary directories to test functions. """
+import re
+
+import pytest
+import py
+from _pytest.monkeypatch import MonkeyPatch
+
+
+class TempdirFactory:
+ """Factory for temporary directories under the common base temp directory.
+
+ The base directory can be configured using the ``--basetemp`` option.
+ """
+
+ def __init__(self, config):
+ self.config = config
+ self.trace = config.trace.get("tmpdir")
+
+ def ensuretemp(self, string, dir=1):
+ """ (deprecated) return temporary directory path with
+ the given string as the trailing part. It is usually
+ better to use the 'tmpdir' function argument, which
+ provides a unique-per-test-invocation directory
+ and is guaranteed to be empty.
+ """
+ #py.log._apiwarn(">1.1", "use tmpdir function argument")
+ return self.getbasetemp().ensure(string, dir=dir)
+
+ def mktemp(self, basename, numbered=True):
+ """Create a subdirectory of the base temporary directory and return it.
+ If ``numbered``, ensure the directory is unique by adding a number
+ prefix greater than any existing one.
+ """
+ basetemp = self.getbasetemp()
+ if not numbered:
+ p = basetemp.mkdir(basename)
+ else:
+ p = py.path.local.make_numbered_dir(prefix=basename,
+ keep=0, rootdir=basetemp, lock_timeout=None)
+ self.trace("mktemp", p)
+ return p
+
+ def getbasetemp(self):
+ """ return base temporary directory. """
+ try:
+ return self._basetemp
+ except AttributeError:
+ basetemp = self.config.option.basetemp
+ if basetemp:
+ basetemp = py.path.local(basetemp)
+ if basetemp.check():
+ basetemp.remove()
+ basetemp.mkdir()
+ else:
+ temproot = py.path.local.get_temproot()
+ user = get_user()
+ if user:
+ # use a sub-directory in the temproot to speed-up
+ # make_numbered_dir() call
+ rootdir = temproot.join('pytest-of-%s' % user)
+ else:
+ rootdir = temproot
+ rootdir.ensure(dir=1)
+ basetemp = py.path.local.make_numbered_dir(prefix='pytest-',
+ rootdir=rootdir)
+ self._basetemp = t = basetemp.realpath()
+ self.trace("new basetemp", t)
+ return t
+
+ def finish(self):
+ self.trace("finish")
+
+
+def get_user():
+ """Return the current user name, or None if getuser() does not work
+ in the current environment (see #1010).
+ """
+ import getpass
+ try:
+ return getpass.getuser()
+ except (ImportError, KeyError):
+ return None
+
+
+# backward compatibility
+TempdirHandler = TempdirFactory
+
+
+def pytest_configure(config):
+ """Create a TempdirFactory and attach it to the config object.
+
+ This is to comply with existing plugins which expect the handler to be
+ available at pytest_configure time, but ideally should be moved entirely
+ to the tmpdir_factory session fixture.
+ """
+ mp = MonkeyPatch()
+ t = TempdirFactory(config)
+ config._cleanup.extend([mp.undo, t.finish])
+ mp.setattr(config, '_tmpdirhandler', t, raising=False)
+ mp.setattr(pytest, 'ensuretemp', t.ensuretemp, raising=False)
+
+
+@pytest.fixture(scope='session')
+def tmpdir_factory(request):
+ """Return a TempdirFactory instance for the test session.
+ """
+ return request.config._tmpdirhandler
+
+
+@pytest.fixture
+def tmpdir(request, tmpdir_factory):
+ """Return a temporary directory path object
+ which is unique to each test function invocation,
+ created as a sub directory of the base temporary
+ directory. The returned object is a `py.path.local`_
+ path object.
+ """
+ name = request.node.name
+ name = re.sub("[\W]", "_", name)
+ MAXVAL = 30
+ if len(name) > MAXVAL:
+ name = name[:MAXVAL]
+ x = tmpdir_factory.mktemp(name, numbered=True)
+ return x
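+
+# Illustrative sketch (not part of this module): using the fixtures above in a
+# test file collected by pytest (test names are hypothetical).
+#
+#     def test_create_file(tmpdir):
+#         p = tmpdir.join("hello.txt")
+#         p.write("content")
+#         assert p.read() == "content"
+#
+#     def test_shared_dir(tmpdir_factory):
+#         base = tmpdir_factory.mktemp("data")
+#         assert base.check(dir=1)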
diff --git a/lib/spack/external/_pytest/unittest.py b/lib/spack/external/_pytest/unittest.py
new file mode 100644
index 0000000000..73224010b2
--- /dev/null
+++ b/lib/spack/external/_pytest/unittest.py
@@ -0,0 +1,217 @@
+""" discovery and running of std-library "unittest" style tests. """
+from __future__ import absolute_import
+
+import sys
+import traceback
+
+import pytest
+# for transferring markers
+import _pytest._code
+from _pytest.python import transfer_markers
+from _pytest.skipping import MarkEvaluator
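+
+# Illustrative sketch (not part of this module): the kind of test this plugin
+# collects -- a plain unittest.TestCase subclass found in a test module.
+#
+#     import unittest
+#
+#     class TestMath(unittest.TestCase):
+#         def test_add(self):
+#             self.assertEqual(1 + 1, 2)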
+
+
+def pytest_pycollect_makeitem(collector, name, obj):
+ # has unittest been imported and is obj a subclass of its TestCase?
+ try:
+ if not issubclass(obj, sys.modules["unittest"].TestCase):
+ return
+ except Exception:
+ return
+ # yes, so let's collect it
+ return UnitTestCase(name, parent=collector)
+
+
+class UnitTestCase(pytest.Class):
+ # marker for fixturemanager.getfixtureinfo()
+ # to declare that our children do not support funcargs
+ nofuncargs = True
+
+ def setup(self):
+ cls = self.obj
+ if getattr(cls, '__unittest_skip__', False):
+ return # skipped
+ setup = getattr(cls, 'setUpClass', None)
+ if setup is not None:
+ setup()
+ teardown = getattr(cls, 'tearDownClass', None)
+ if teardown is not None:
+ self.addfinalizer(teardown)
+ super(UnitTestCase, self).setup()
+
+ def collect(self):
+ from unittest import TestLoader
+ cls = self.obj
+ if not getattr(cls, "__test__", True):
+ return
+ self.session._fixturemanager.parsefactories(self, unittest=True)
+ loader = TestLoader()
+ module = self.getparent(pytest.Module).obj
+ foundsomething = False
+ for name in loader.getTestCaseNames(self.obj):
+ x = getattr(self.obj, name)
+ if not getattr(x, '__test__', True):
+ continue
+ funcobj = getattr(x, 'im_func', x)
+ transfer_markers(funcobj, cls, module)
+ yield TestCaseFunction(name, parent=self)
+ foundsomething = True
+
+ if not foundsomething:
+ runtest = getattr(self.obj, 'runTest', None)
+ if runtest is not None:
+ ut = sys.modules.get("twisted.trial.unittest", None)
+ if ut is None or runtest != ut.TestCase.runTest:
+ yield TestCaseFunction('runTest', parent=self)
+
+
+
+class TestCaseFunction(pytest.Function):
+ _excinfo = None
+
+ def setup(self):
+ self._testcase = self.parent.obj(self.name)
+ self._fix_unittest_skip_decorator()
+ self._obj = getattr(self._testcase, self.name)
+ if hasattr(self._testcase, 'setup_method'):
+ self._testcase.setup_method(self._obj)
+ if hasattr(self, "_request"):
+ self._request._fillfixtures()
+
+ def _fix_unittest_skip_decorator(self):
+ """
+ The @unittest.skip decorator calls functools.wraps(self._testcase)
+ The call to functools.wraps() fails unless self._testcase
+ has a __name__ attribute. This is usually automatically supplied
+ if the test is a function or method, but we need to add it manually
+ here.
+
+ See issue #1169
+ """
+ if sys.version_info[0] == 2:
+ setattr(self._testcase, "__name__", self.name)
+
+ def teardown(self):
+ if hasattr(self._testcase, 'teardown_method'):
+ self._testcase.teardown_method(self._obj)
+ # Allow garbage collection on TestCase instance attributes.
+ self._testcase = None
+ self._obj = None
+
+ def startTest(self, testcase):
+ pass
+
+ def _addexcinfo(self, rawexcinfo):
+ # unwrap potential exception info (see twisted trial support below)
+ rawexcinfo = getattr(rawexcinfo, '_rawexcinfo', rawexcinfo)
+ try:
+ excinfo = _pytest._code.ExceptionInfo(rawexcinfo)
+ except TypeError:
+ try:
+ try:
+ l = traceback.format_exception(*rawexcinfo)
+ l.insert(0, "NOTE: Incompatible Exception Representation, "
+ "displaying natively:\n\n")
+ pytest.fail("".join(l), pytrace=False)
+ except (pytest.fail.Exception, KeyboardInterrupt):
+ raise
+ except:
+ pytest.fail("ERROR: Unknown Incompatible Exception "
+ "representation:\n%r" %(rawexcinfo,), pytrace=False)
+ except KeyboardInterrupt:
+ raise
+ except pytest.fail.Exception:
+ excinfo = _pytest._code.ExceptionInfo()
+ self.__dict__.setdefault('_excinfo', []).append(excinfo)
+
+ def addError(self, testcase, rawexcinfo):
+ self._addexcinfo(rawexcinfo)
+ def addFailure(self, testcase, rawexcinfo):
+ self._addexcinfo(rawexcinfo)
+
+ def addSkip(self, testcase, reason):
+ try:
+ pytest.skip(reason)
+ except pytest.skip.Exception:
+ self._evalskip = MarkEvaluator(self, 'SkipTest')
+ self._evalskip.result = True
+ self._addexcinfo(sys.exc_info())
+
+ def addExpectedFailure(self, testcase, rawexcinfo, reason=""):
+ try:
+ pytest.xfail(str(reason))
+ except pytest.xfail.Exception:
+ self._addexcinfo(sys.exc_info())
+
+ def addUnexpectedSuccess(self, testcase, reason=""):
+ self._unexpectedsuccess = reason
+
+ def addSuccess(self, testcase):
+ pass
+
+ def stopTest(self, testcase):
+ pass
+
+ def runtest(self):
+ if self.config.pluginmanager.get_plugin("pdbinvoke") is None:
+ self._testcase(result=self)
+ else:
+ # disables tearDown and cleanups for post mortem debugging (see #1890)
+ self._testcase.debug()
+
+
+ def _prunetraceback(self, excinfo):
+ pytest.Function._prunetraceback(self, excinfo)
+ traceback = excinfo.traceback.filter(
+ lambda x:not x.frame.f_globals.get('__unittest'))
+ if traceback:
+ excinfo.traceback = traceback
+
+@pytest.hookimpl(tryfirst=True)
+def pytest_runtest_makereport(item, call):
+ if isinstance(item, TestCaseFunction):
+ if item._excinfo:
+ call.excinfo = item._excinfo.pop(0)
+ try:
+ del call.result
+ except AttributeError:
+ pass
+
+# twisted trial support
+
+@pytest.hookimpl(hookwrapper=True)
+def pytest_runtest_protocol(item):
+ if isinstance(item, TestCaseFunction) and \
+ 'twisted.trial.unittest' in sys.modules:
+ ut = sys.modules['twisted.python.failure']
+ Failure__init__ = ut.Failure.__init__
+ check_testcase_implements_trial_reporter()
+
+ def excstore(self, exc_value=None, exc_type=None, exc_tb=None,
+ captureVars=None):
+ if exc_value is None:
+ self._rawexcinfo = sys.exc_info()
+ else:
+ if exc_type is None:
+ exc_type = type(exc_value)
+ self._rawexcinfo = (exc_type, exc_value, exc_tb)
+ try:
+ Failure__init__(self, exc_value, exc_type, exc_tb,
+ captureVars=captureVars)
+ except TypeError:
+ Failure__init__(self, exc_value, exc_type, exc_tb)
+
+ ut.Failure.__init__ = excstore
+ yield
+ ut.Failure.__init__ = Failure__init__
+ else:
+ yield
+
+
+def check_testcase_implements_trial_reporter(done=[]):
+ if done:
+ return
+ from zope.interface import classImplements
+ from twisted.trial.itrial import IReporter
+ classImplements(TestCaseFunction, IReporter)
+ done.append(1)
diff --git a/lib/spack/external/_pytest/vendored_packages/README.md b/lib/spack/external/_pytest/vendored_packages/README.md
new file mode 100644
index 0000000000..b5fe6febb0
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/README.md
@@ -0,0 +1,13 @@
+This directory vendors the `pluggy` module.
+
+For a more detailed discussion of the reasons for vendoring this
+package, please see [this issue](https://github.com/pytest-dev/pytest/issues/944).
+
+To update the current version, execute:
+
+```
+$ pip install -U pluggy==<version> --no-compile --target=_pytest/vendored_packages
+```
+
+Then commit the modified files; the `pluggy-<version>.dist-info` directory
+created by `pip` should be added as well.
diff --git a/lib/spack/external/_pytest/vendored_packages/__init__.py b/lib/spack/external/_pytest/vendored_packages/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/__init__.py
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst
new file mode 100644
index 0000000000..da0e7a6ed7
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/DESCRIPTION.rst
@@ -0,0 +1,11 @@
+
+Plugin registration and hook calling for Python
+===============================================
+
+This is the plugin manager as used by pytest but stripped
+of pytest specific details.
+
+During the 0.x series this plugin does not have much documentation
+except extensive docstrings in the pluggy.py module.
+
+
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER
new file mode 100644
index 0000000000..a1b589e38a
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/INSTALLER
@@ -0,0 +1 @@
+pip
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt
new file mode 100644
index 0000000000..121017d086
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/LICENSE.txt
@@ -0,0 +1,22 @@
+The MIT License (MIT)
+
+Copyright (c) 2015 holger krekel (rather uses bitbucket/hpk42)
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA
new file mode 100644
index 0000000000..bd88517c94
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/METADATA
@@ -0,0 +1,40 @@
+Metadata-Version: 2.0
+Name: pluggy
+Version: 0.4.0
+Summary: plugin and hook calling mechanisms for python
+Home-page: https://github.com/pytest-dev/pluggy
+Author: Holger Krekel
+Author-email: holger at merlinux.eu
+License: MIT license
+Platform: unix
+Platform: linux
+Platform: osx
+Platform: win32
+Classifier: Development Status :: 4 - Beta
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: MIT License
+Classifier: Operating System :: POSIX
+Classifier: Operating System :: Microsoft :: Windows
+Classifier: Operating System :: MacOS :: MacOS X
+Classifier: Topic :: Software Development :: Testing
+Classifier: Topic :: Software Development :: Libraries
+Classifier: Topic :: Utilities
+Classifier: Programming Language :: Python :: 2
+Classifier: Programming Language :: Python :: 2.6
+Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.3
+Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
+
+
+Plugin registration and hook calling for Python
+===============================================
+
+This is the plugin manager as used by pytest but stripped
+of pytest specific details.
+
+During the 0.x series this plugin does not have much documentation
+except extensive docstrings in the pluggy.py module.
+
+
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD
new file mode 100644
index 0000000000..3003a3bf2b
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/RECORD
@@ -0,0 +1,9 @@
+pluggy.py,sha256=u0oG9cv-oLOkNvEBlwnnu8pp1AyxpoERgUO00S3rvpQ,31543
+pluggy-0.4.0.dist-info/DESCRIPTION.rst,sha256=ltvjkFd40LW_xShthp6RRVM6OB_uACYDFR3kTpKw7o4,307
+pluggy-0.4.0.dist-info/LICENSE.txt,sha256=ruwhUOyV1HgE9F35JVL9BCZ9vMSALx369I4xq9rhpkM,1134
+pluggy-0.4.0.dist-info/METADATA,sha256=pe2hbsqKFaLHC6wAQPpFPn0KlpcPfLBe_BnS4O70bfk,1364
+pluggy-0.4.0.dist-info/RECORD,,
+pluggy-0.4.0.dist-info/WHEEL,sha256=9Z5Xm-eel1bTS7e6ogYiKz0zmPEqDwIypurdHN1hR40,116
+pluggy-0.4.0.dist-info/metadata.json,sha256=T3go5L2qOa_-H-HpCZi3EoVKb8sZ3R-fOssbkWo2nvM,1119
+pluggy-0.4.0.dist-info/top_level.txt,sha256=xKSCRhai-v9MckvMuWqNz16c1tbsmOggoMSwTgcpYHE,7
+pluggy-0.4.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL
new file mode 100644
index 0000000000..8b6dd1b5a8
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/WHEEL
@@ -0,0 +1,6 @@
+Wheel-Version: 1.0
+Generator: bdist_wheel (0.29.0)
+Root-Is-Purelib: true
+Tag: py2-none-any
+Tag: py3-none-any
+
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json
new file mode 100644
index 0000000000..cde22aff02
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/metadata.json
@@ -0,0 +1 @@
+{"classifiers": ["Development Status :: 4 - Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Testing", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5"], "extensions": {"python.details": {"contacts": [{"email": "holger at merlinux.eu", "name": "Holger Krekel", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "https://github.com/pytest-dev/pluggy"}}}, "generator": "bdist_wheel (0.29.0)", "license": "MIT license", "metadata_version": "2.0", "name": "pluggy", "platform": "unix", "summary": "plugin and hook calling mechanisms for python", "version": "0.4.0"} \ No newline at end of file
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt
new file mode 100644
index 0000000000..11bdb5c1f5
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy-0.4.0.dist-info/top_level.txt
@@ -0,0 +1 @@
+pluggy
diff --git a/lib/spack/external/_pytest/vendored_packages/pluggy.py b/lib/spack/external/_pytest/vendored_packages/pluggy.py
new file mode 100644
index 0000000000..9c13932b36
--- /dev/null
+++ b/lib/spack/external/_pytest/vendored_packages/pluggy.py
@@ -0,0 +1,802 @@
+"""
+PluginManager, basic initialization and tracing.
+
+pluggy is the crystallized core of plugin management as used
+by some 150 plugins for pytest.
+
+Pluggy uses semantic versioning. Breaking changes are only foreseen for
+Major releases (incremented X in "X.Y.Z"). If you want to use pluggy in
+your project you should thus use a dependency restriction like
+"pluggy>=0.1.0,<1.0" to avoid surprises.
+
+pluggy is concerned with hook specification, hook implementations and hook
+calling. For any given hook specification a hook call invokes up to N implementations.
+A hook implementation can influence its position and type of execution:
+if attributed "tryfirst" or "trylast" it will be executed first or last,
+respectively. However, if attributed "hookwrapper" an implementation
+can wrap all calls to non-hookwrapper implementations. A hookwrapper
+can thus execute some code before and after the execution of other hooks.
+
+Hook specification is done by way of a regular python function where
+both the function name and the names of all its arguments are significant.
+Each hook implementation function is verified against the original specification
+function, including the names of all its arguments. To allow for hook specifications
+to evolve over the lifetime of a project, hook implementations can
+accept fewer arguments. One can thus add new arguments and semantics to
+a hook specification, typically without breaking
+existing hook implementations.
+
+The chosen approach is meant to let a hook designer think carefully about
+which objects are needed by an extension writer. By contrast, subclass-based
+extension mechanisms often expose a lot more state and behaviour than needed,
+thus restricting future developments.
+
+Pluggy currently consists of functionality for:
+
+- a way to register new hook specifications. Without a hook
+ specification no hook calling can be performed.
+
+- a registry of plugins which contain hook implementation functions. It
+ is possible to register plugins for which a hook specification is not yet
+ known and validate all hooks when the system is in a more referentially
+ consistent state. Setting an "optionalhook" attribution on a hook
+ implementation will avoid a PluginValidationError if a specification
+ is missing. This allows optional integration between plugins.
+
+- a "hook" relay object from which you can launch 1:N calls to
+ registered hook implementation functions
+
+- a mechanism for ordering hook implementation functions
+
+- mechanisms for two different types of 1:N calls: "firstresult", where
+ the call stops when the first implementation returns a non-None result,
+ and the default mode, where all hook implementations
+ are called and their non-None results collected.
+
+- mechanisms for "historic" extension points such that all newly
+ registered functions will receive all hook calls that happened
+ before their registration.
+
+- a mechanism for discovering plugin objects which are based on
+ setuptools based entry points.
+
+- a simple tracing mechanism, including tracing of plugin calls and
+ their arguments.
+
+"""
+import sys
+import inspect
+
+__version__ = '0.4.0'
+
+__all__ = ["PluginManager", "PluginValidationError", "HookCallError",
+ "HookspecMarker", "HookimplMarker"]
+
+_py3 = sys.version_info > (3, 0)
+
+
+class HookspecMarker:
+ """ Decorator helper class for marking functions as hook specifications.
+
+ You can instantiate it with a project_name to get a decorator.
+ Calling PluginManager.add_hookspecs later will discover all marked functions
+ if the PluginManager uses the same project_name.
+ """
+
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(self, function=None, firstresult=False, historic=False):
+ """ if passed a function, directly sets attributes on the function
+ which will make it discoverable to add_hookspecs(). If passed no
+ function, returns a decorator which can be applied to a function
+ later using the attributes supplied.
+
+ If firstresult is True the 1:N hook call (N being the number of registered
+ hook implementation functions) will stop at I<=N when the I'th function
+ returns a non-None result.
+
+ If historic is True calls to a hook will be memorized and replayed
+ on later registered plugins.
+
+ """
+ def setattr_hookspec_opts(func):
+ if historic and firstresult:
+ raise ValueError("cannot have a historic firstresult hook")
+ setattr(func, self.project_name + "_spec",
+ dict(firstresult=firstresult, historic=historic))
+ return func
+
+ if function is not None:
+ return setattr_hookspec_opts(function)
+ else:
+ return setattr_hookspec_opts
+
+
+class HookimplMarker:
+ """ Decorator helper class for marking functions as hook implementations.
+
+ You can instantiate it with a project_name to get a decorator.
+ Calling PluginManager.register later will discover all marked functions
+ if the PluginManager uses the same project_name.
+ """
+ def __init__(self, project_name):
+ self.project_name = project_name
+
+ def __call__(self, function=None, hookwrapper=False, optionalhook=False,
+ tryfirst=False, trylast=False):
+
+ """ if passed a function, directly sets attributes on the function
+ which will make it discoverable to register(). If passed no function,
+ returns a decorator which can be applied to a function later using
+ the attributes supplied.
+
+ If optionalhook is True a missing matching hook specification will not result
+ in an error (by default it is an error if no matching spec is found).
+
+ If tryfirst is True this hook implementation will run as early as possible
+ in the chain of N hook implementations for a specification.
+
+ If trylast is True this hook implementation will run as late as possible
+ in the chain of N hook implementations.
+
+ If hookwrapper is True the hook implementation needs to execute exactly
+ one "yield". The code before the yield is run early, before any non-hookwrapper
+ function is run. The code after the yield is run after all non-hookwrapper
+ functions have run. The yield receives a ``_CallOutcome`` object representing
+ the exception or result outcome of the inner calls (including other hookwrapper
+ calls).
+
+ """
+ def setattr_hookimpl_opts(func):
+ setattr(func, self.project_name + "_impl",
+ dict(hookwrapper=hookwrapper, optionalhook=optionalhook,
+ tryfirst=tryfirst, trylast=trylast))
+ return func
+
+ if function is None:
+ return setattr_hookimpl_opts
+ else:
+ return setattr_hookimpl_opts(function)
+
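+# Illustrative sketch (not part of pluggy): a hookwrapper implementation for a
+# hypothetical hook "myhook" in a project using the marker defined above.
+#
+#     @hookimpl(hookwrapper=True)
+#     def myhook(arg):
+#         # runs before any non-hookwrapper implementation
+#         outcome = yield
+#         result = outcome.get_result()  # or inspect outcome.excinfo on error
+#         # runs after all non-hookwrapper implementations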
+
+def normalize_hookimpl_opts(opts):
+ opts.setdefault("tryfirst", False)
+ opts.setdefault("trylast", False)
+ opts.setdefault("hookwrapper", False)
+ opts.setdefault("optionalhook", False)
+
+
+class _TagTracer:
+ def __init__(self):
+ self._tag2proc = {}
+ self.writer = None
+ self.indent = 0
+
+ def get(self, name):
+ return _TagTracerSub(self, (name,))
+
+ def format_message(self, tags, args):
+ if isinstance(args[-1], dict):
+ extra = args[-1]
+ args = args[:-1]
+ else:
+ extra = {}
+
+ content = " ".join(map(str, args))
+ indent = " " * self.indent
+
+ lines = [
+ "%s%s [%s]\n" % (indent, content, ":".join(tags))
+ ]
+
+ for name, value in extra.items():
+ lines.append("%s %s: %s\n" % (indent, name, value))
+ return lines
+
+ def processmessage(self, tags, args):
+ if self.writer is not None and args:
+ lines = self.format_message(tags, args)
+ self.writer(''.join(lines))
+ try:
+ self._tag2proc[tags](tags, args)
+ except KeyError:
+ pass
+
+ def setwriter(self, writer):
+ self.writer = writer
+
+ def setprocessor(self, tags, processor):
+ if isinstance(tags, str):
+ tags = tuple(tags.split(":"))
+ else:
+ assert isinstance(tags, tuple)
+ self._tag2proc[tags] = processor
+
+
+class _TagTracerSub:
+ def __init__(self, root, tags):
+ self.root = root
+ self.tags = tags
+
+ def __call__(self, *args):
+ self.root.processmessage(self.tags, args)
+
+ def setmyprocessor(self, processor):
+ self.root.setprocessor(self.tags, processor)
+
+ def get(self, name):
+ return self.__class__(self.root, self.tags + (name,))
+
+
+def _raise_wrapfail(wrap_controller, msg):
+ co = wrap_controller.gi_code
+ raise RuntimeError("wrap_controller at %r %s:%d %s" %
+ (co.co_name, co.co_filename, co.co_firstlineno, msg))
+
+
+def _wrapped_call(wrap_controller, func):
+ """ Wrap calling to a function with a generator which needs to yield
+ exactly once. The yield point will trigger calling the wrapped function
+ and return its _CallOutcome to the yield point. The generator then needs
+ to finish (raise StopIteration) in order for the wrapped call to complete.
+ """
+ try:
+ next(wrap_controller) # first yield
+ except StopIteration:
+ _raise_wrapfail(wrap_controller, "did not yield")
+ call_outcome = _CallOutcome(func)
+ try:
+ wrap_controller.send(call_outcome)
+ _raise_wrapfail(wrap_controller, "has second yield")
+ except StopIteration:
+ pass
+ return call_outcome.get_result()
+
+
+class _CallOutcome:
+ """ Outcome of a function call, either an exception or a proper result.
+ Calling the ``get_result`` method will return the result or reraise
+ the exception raised when the function was called. """
+ excinfo = None
+
+ def __init__(self, func):
+ try:
+ self.result = func()
+ except BaseException:
+ self.excinfo = sys.exc_info()
+
+ def force_result(self, result):
+ self.result = result
+ self.excinfo = None
+
+ def get_result(self):
+ if self.excinfo is None:
+ return self.result
+ else:
+ ex = self.excinfo
+ if _py3:
+ raise ex[1].with_traceback(ex[2])
+ _reraise(*ex) # noqa
+
+if not _py3:
+ exec("""
+def _reraise(cls, val, tb):
+ raise cls, val, tb
+""")
+
+
+class _TracedHookExecution:
+ def __init__(self, pluginmanager, before, after):
+ self.pluginmanager = pluginmanager
+ self.before = before
+ self.after = after
+ self.oldcall = pluginmanager._inner_hookexec
+ assert not isinstance(self.oldcall, _TracedHookExecution)
+ self.pluginmanager._inner_hookexec = self
+
+ def __call__(self, hook, hook_impls, kwargs):
+ self.before(hook.name, hook_impls, kwargs)
+ outcome = _CallOutcome(lambda: self.oldcall(hook, hook_impls, kwargs))
+ self.after(outcome, hook.name, hook_impls, kwargs)
+ return outcome.get_result()
+
+ def undo(self):
+ self.pluginmanager._inner_hookexec = self.oldcall
+
+
+class PluginManager(object):
+ """ Core Pluginmanager class which manages registration
+ of plugin objects and 1:N hook calling.
+
+ You can register new hooks by calling ``add_hookspecs(module_or_class)``.
+ You can register plugin objects (which contain hooks) by calling
+ ``register(plugin)``. The PluginManager is initialized with a
+ prefix that is searched for in the attribute names of registered
+ plugin objects. An optional excludefunc allows blacklisting names which
+ are not considered hooks despite a matching prefix.
+
+ For debugging purposes you can call ``enable_tracing()``
+ which will subsequently send debug information to the trace helper.
+ """
+
+ def __init__(self, project_name, implprefix=None):
+ """ if implprefix is given implementation functions
+ will be recognized if their name matches the implprefix. """
+ self.project_name = project_name
+ self._name2plugin = {}
+ self._plugin2hookcallers = {}
+ self._plugin_distinfo = []
+ self.trace = _TagTracer().get("pluginmanage")
+ self.hook = _HookRelay(self.trace.root.get("hook"))
+ self._implprefix = implprefix
+ self._inner_hookexec = lambda hook, methods, kwargs: \
+ _MultiCall(methods, kwargs, hook.spec_opts).execute()
+
+ def _hookexec(self, hook, methods, kwargs):
+ # called from all hookcaller instances.
+ # enable_tracing will set its own wrapping function at self._inner_hookexec
+ return self._inner_hookexec(hook, methods, kwargs)
+
+ def register(self, plugin, name=None):
+ """ Register a plugin and return its canonical name or None if the name
+ is blocked from registering. Raise a ValueError if the plugin is already
+ registered. """
+ plugin_name = name or self.get_canonical_name(plugin)
+
+ if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers:
+ if self._name2plugin.get(plugin_name, -1) is None:
+ return # blocked plugin, return None to indicate no registration
+ raise ValueError("Plugin already registered: %s=%s\n%s" %
+ (plugin_name, plugin, self._name2plugin))
+
+ # XXX if an error happens we should make sure no state has been
+ # changed at point of return
+ self._name2plugin[plugin_name] = plugin
+
+ # register matching hook implementations of the plugin
+ self._plugin2hookcallers[plugin] = hookcallers = []
+ for name in dir(plugin):
+ hookimpl_opts = self.parse_hookimpl_opts(plugin, name)
+ if hookimpl_opts is not None:
+ normalize_hookimpl_opts(hookimpl_opts)
+ method = getattr(plugin, name)
+ hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts)
+ hook = getattr(self.hook, name, None)
+ if hook is None:
+ hook = _HookCaller(name, self._hookexec)
+ setattr(self.hook, name, hook)
+ elif hook.has_spec():
+ self._verify_hook(hook, hookimpl)
+ hook._maybe_apply_history(hookimpl)
+ hook._add_hookimpl(hookimpl)
+ hookcallers.append(hook)
+ return plugin_name
+
+ def parse_hookimpl_opts(self, plugin, name):
+ method = getattr(plugin, name)
+ try:
+ res = getattr(method, self.project_name + "_impl", None)
+ except Exception:
+ res = {}
+ if res is not None and not isinstance(res, dict):
+ # false positive
+ res = None
+ elif res is None and self._implprefix and name.startswith(self._implprefix):
+ res = {}
+ return res
+
+ def unregister(self, plugin=None, name=None):
+ """ unregister a plugin object and all its contained hook implementations
+ from internal data structures. """
+ if name is None:
+ assert plugin is not None, "one of name or plugin needs to be specified"
+ name = self.get_name(plugin)
+
+ if plugin is None:
+ plugin = self.get_plugin(name)
+
+ # if self._name2plugin[name] is None, registration was blocked: ignore
+ if self._name2plugin.get(name):
+ del self._name2plugin[name]
+
+ for hookcaller in self._plugin2hookcallers.pop(plugin, []):
+ hookcaller._remove_plugin(plugin)
+
+ return plugin
+
+ def set_blocked(self, name):
+ """ block registrations of the given name, unregister if already registered. """
+ self.unregister(name=name)
+ self._name2plugin[name] = None
+
+ def is_blocked(self, name):
+ """ return True if the name blogs registering plugins of that name. """
+ return name in self._name2plugin and self._name2plugin[name] is None
+
+ def add_hookspecs(self, module_or_class):
+ """ add new hook specifications defined in the given module_or_class.
+ Functions are recognized if they have been decorated accordingly. """
+ names = []
+ for name in dir(module_or_class):
+ spec_opts = self.parse_hookspec_opts(module_or_class, name)
+ if spec_opts is not None:
+ hc = getattr(self.hook, name, None)
+ if hc is None:
+ hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts)
+ setattr(self.hook, name, hc)
+ else:
+ # plugins registered this hook without knowing the spec
+ hc.set_specification(module_or_class, spec_opts)
+ for hookfunction in (hc._wrappers + hc._nonwrappers):
+ self._verify_hook(hc, hookfunction)
+ names.append(name)
+
+ if not names:
+ raise ValueError("did not find any %r hooks in %r" %
+ (self.project_name, module_or_class))
+
+ def parse_hookspec_opts(self, module_or_class, name):
+ method = getattr(module_or_class, name)
+ return getattr(method, self.project_name + "_spec", None)
+
+ def get_plugins(self):
+ """ return the set of registered plugins. """
+ return set(self._plugin2hookcallers)
+
+ def is_registered(self, plugin):
+ """ Return True if the plugin is already registered. """
+ return plugin in self._plugin2hookcallers
+
+ def get_canonical_name(self, plugin):
+ """ Return canonical name for a plugin object. Note that a plugin
+ may be registered under a different name which was specified
+ by the caller of register(plugin, name). To obtain the name
+ of a registered plugin use ``get_name(plugin)`` instead."""
+ return getattr(plugin, "__name__", None) or str(id(plugin))
+
+ def get_plugin(self, name):
+ """ Return a plugin or None for the given name. """
+ return self._name2plugin.get(name)
+
+ def has_plugin(self, name):
+ """ Return True if a plugin with the given name is registered. """
+ return self.get_plugin(name) is not None
+
+ def get_name(self, plugin):
+ """ Return name for registered plugin or None if not registered. """
+ for name, val in self._name2plugin.items():
+ if plugin == val:
+ return name
+
+ def _verify_hook(self, hook, hookimpl):
+ if hook.is_historic() and hookimpl.hookwrapper:
+ raise PluginValidationError(
+ "Plugin %r\nhook %r\nhistoric incompatible to hookwrapper" %
+ (hookimpl.plugin_name, hook.name))
+
+ for arg in hookimpl.argnames:
+ if arg not in hook.argnames:
+ raise PluginValidationError(
+ "Plugin %r\nhook %r\nargument %r not available\n"
+ "plugin definition: %s\n"
+ "available hookargs: %s" %
+ (hookimpl.plugin_name, hook.name, arg,
+ _formatdef(hookimpl.function), ", ".join(hook.argnames)))
+
+ def check_pending(self):
+ """ Verify that all hooks which have not been verified against
+ a hook specification are optional, otherwise raise PluginValidationError"""
+ for name in self.hook.__dict__:
+ if name[0] != "_":
+ hook = getattr(self.hook, name)
+ if not hook.has_spec():
+ for hookimpl in (hook._wrappers + hook._nonwrappers):
+ if not hookimpl.optionalhook:
+ raise PluginValidationError(
+ "unknown hook %r in plugin %r" %
+ (name, hookimpl.plugin))
+
+ def load_setuptools_entrypoints(self, entrypoint_name):
+ """ Load modules from querying the specified setuptools entrypoint name.
+ Return the number of loaded plugins. """
+ from pkg_resources import (iter_entry_points, DistributionNotFound,
+ VersionConflict)
+ for ep in iter_entry_points(entrypoint_name):
+ # is the plugin registered or blocked?
+ if self.get_plugin(ep.name) or self.is_blocked(ep.name):
+ continue
+ try:
+ plugin = ep.load()
+ except DistributionNotFound:
+ continue
+ except VersionConflict as e:
+ raise PluginValidationError(
+ "Plugin %r could not be loaded: %s!" % (ep.name, e))
+ self.register(plugin, name=ep.name)
+ self._plugin_distinfo.append((plugin, ep.dist))
+ return len(self._plugin_distinfo)
+
+ def list_plugin_distinfo(self):
+ """ return list of distinfo/plugin tuples for all setuptools registered
+ plugins. """
+ return list(self._plugin_distinfo)
+
+ def list_name_plugin(self):
+ """ return list of name/plugin pairs. """
+ return list(self._name2plugin.items())
+
+ def get_hookcallers(self, plugin):
+ """ get all hook callers for the specified plugin. """
+ return self._plugin2hookcallers.get(plugin)
+
+ def add_hookcall_monitoring(self, before, after):
+ """ add before/after tracing functions for all hooks
+ and return an undo function which, when called,
+ will remove the added tracers.
+
+ ``before(hook_name, hook_impls, kwargs)`` will be called ahead
+ of all hook calls and receives the hook name, a list
+ of HookImpl instances and the keyword arguments for the hook call.
+
+ ``after(outcome, hook_name, hook_impls, kwargs)`` receives the
+ same arguments as ``before`` but also a :py:class:`_CallOutcome` object
+ which represents the result of the overall hook call.
+ """
+ return _TracedHookExecution(self, before, after).undo
+
+ def enable_tracing(self):
+ """ enable tracing of hook calls and return an undo function. """
+ hooktrace = self.hook._trace
+
+ def before(hook_name, methods, kwargs):
+ hooktrace.root.indent += 1
+ hooktrace(hook_name, kwargs)
+
+ def after(outcome, hook_name, methods, kwargs):
+ if outcome.excinfo is None:
+ hooktrace("finish", hook_name, "-->", outcome.result)
+ hooktrace.root.indent -= 1
+
+ return self.add_hookcall_monitoring(before, after)
+
+ def subset_hook_caller(self, name, remove_plugins):
+ """ Return a new _HookCaller instance for the named method
+ which manages calls to all registered plugins except the
+ ones from remove_plugins. """
+ orig = getattr(self.hook, name)
+ plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)]
+ if plugins_to_remove:
+ hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class,
+ orig.spec_opts)
+ for hookimpl in (orig._wrappers + orig._nonwrappers):
+ plugin = hookimpl.plugin
+ if plugin not in plugins_to_remove:
+ hc._add_hookimpl(hookimpl)
+ # we also keep track of this hook caller so it
+ # gets properly removed on plugin unregistration
+ self._plugin2hookcallers.setdefault(plugin, []).append(hc)
+ return hc
+ return orig
+
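+# Illustrative usage sketch (``MyHookSpecs`` and ``my_plugin`` are hypothetical
+# stand-ins, not part of this vendored module):
+#
+#     pm = PluginManager("myproject")
+#     pm.add_hookspecs(MyHookSpecs)    # functions carrying a myproject_spec attr
+#     pm.register(my_plugin)           # methods carrying a myproject_impl attr
+#     results = pm.hook.myhook(arg=1)  # 1:N call; returns a list of results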
+
+class _MultiCall:
+ """ execute a call into multiple python functions/methods. """
+
+ # XXX note that the __multicall__ argument is supported only
+ # for pytest compatibility reasons. It was never officially
+ # supported there and has been explicitly deprecated since 2.8,
+ # so we can remove it soon, which will let us avoid the recursion
+ # below in execute() and simplify/speed up the execute loop.
+
+ def __init__(self, hook_impls, kwargs, specopts={}):
+ self.hook_impls = hook_impls
+ self.kwargs = kwargs
+ self.kwargs["__multicall__"] = self
+ self.specopts = specopts
+
+ def execute(self):
+ all_kwargs = self.kwargs
+ self.results = results = []
+ firstresult = self.specopts.get("firstresult")
+
+ while self.hook_impls:
+ hook_impl = self.hook_impls.pop()
+ try:
+ args = [all_kwargs[argname] for argname in hook_impl.argnames]
+ except KeyError:
+ for argname in hook_impl.argnames:
+ if argname not in all_kwargs:
+ raise HookCallError(
+ "hook call must provide argument %r" % (argname,))
+ if hook_impl.hookwrapper:
+ return _wrapped_call(hook_impl.function(*args), self.execute)
+ res = hook_impl.function(*args)
+ if res is not None:
+ if firstresult:
+ return res
+ results.append(res)
+
+ if not firstresult:
+ return results
+
+ def __repr__(self):
+ status = "%d meths" % (len(self.hook_impls),)
+ if hasattr(self, "results"):
+ status = ("%d results, " % len(self.results)) + status
+ return "<_MultiCall %s, kwargs=%r>" % (status, self.kwargs)
+
+
+def varnames(func, startindex=None):
+ """ return argument name tuple for a function, method, class or callable.
+
+ In case of a class, its "__init__" method is considered.
+ For methods the "self" parameter is not included unless you are passing
+ an unbound method with Python3 (which has no supports for unbound methods)
+ """
+ cache = getattr(func, "__dict__", {})
+ try:
+ return cache["_varnames"]
+ except KeyError:
+ pass
+ if inspect.isclass(func):
+ try:
+ func = func.__init__
+ except AttributeError:
+ return ()
+ startindex = 1
+ else:
+ if not inspect.isfunction(func) and not inspect.ismethod(func):
+ try:
+ func = getattr(func, '__call__', func)
+ except Exception:
+ return ()
+ if startindex is None:
+ startindex = int(inspect.ismethod(func))
+
+ try:
+ rawcode = func.__code__
+ except AttributeError:
+ return ()
+ try:
+ x = rawcode.co_varnames[startindex:rawcode.co_argcount]
+ except AttributeError:
+ x = ()
+ else:
+ defaults = func.__defaults__
+ if defaults:
+ x = x[:-len(defaults)]
+ try:
+ cache["_varnames"] = x
+ except TypeError:
+ pass
+ return x
+
+
+class _HookRelay:
+ """ hook holder object for performing 1:N hook calls where N is the number
+ of registered plugins.
+
+ """
+
+ def __init__(self, trace):
+ self._trace = trace
+
+
+class _HookCaller(object):
+ def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None):
+ self.name = name
+ self._wrappers = []
+ self._nonwrappers = []
+ self._hookexec = hook_execute
+ if specmodule_or_class is not None:
+ assert spec_opts is not None
+ self.set_specification(specmodule_or_class, spec_opts)
+
+ def has_spec(self):
+ return hasattr(self, "_specmodule_or_class")
+
+ def set_specification(self, specmodule_or_class, spec_opts):
+ assert not self.has_spec()
+ self._specmodule_or_class = specmodule_or_class
+ specfunc = getattr(specmodule_or_class, self.name)
+ argnames = varnames(specfunc, startindex=inspect.isclass(specmodule_or_class))
+ assert "self" not in argnames # sanity check
+ self.argnames = ["__multicall__"] + list(argnames)
+ self.spec_opts = spec_opts
+ if spec_opts.get("historic"):
+ self._call_history = []
+
+ def is_historic(self):
+ return hasattr(self, "_call_history")
+
+ def _remove_plugin(self, plugin):
+ def remove(wrappers):
+ for i, method in enumerate(wrappers):
+ if method.plugin == plugin:
+ del wrappers[i]
+ return True
+ if remove(self._wrappers) is None:
+ if remove(self._nonwrappers) is None:
+ raise ValueError("plugin %r not found" % (plugin,))
+
+ def _add_hookimpl(self, hookimpl):
+ if hookimpl.hookwrapper:
+ methods = self._wrappers
+ else:
+ methods = self._nonwrappers
+
+ if hookimpl.trylast:
+ methods.insert(0, hookimpl)
+ elif hookimpl.tryfirst:
+ methods.append(hookimpl)
+ else:
+ # find last non-tryfirst method
+ i = len(methods) - 1
+ while i >= 0 and methods[i].tryfirst:
+ i -= 1
+ methods.insert(i + 1, hookimpl)
+
+ def __repr__(self):
+ return "<_HookCaller %r>" % (self.name,)
+
+ def __call__(self, **kwargs):
+ assert not self.is_historic()
+ return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
+
+ def call_historic(self, proc=None, kwargs=None):
+ self._call_history.append((kwargs or {}, proc))
+ # historizing hooks don't return results
+ self._hookexec(self, self._nonwrappers + self._wrappers, kwargs)
+
+ def call_extra(self, methods, kwargs):
+ """ Call the hook with some additional temporarily participating
+ methods using the specified kwargs as call parameters. """
+ old = list(self._nonwrappers), list(self._wrappers)
+ for method in methods:
+ opts = dict(hookwrapper=False, trylast=False, tryfirst=False)
+ hookimpl = HookImpl(None, "<temp>", method, opts)
+ self._add_hookimpl(hookimpl)
+ try:
+ return self(**kwargs)
+ finally:
+ self._nonwrappers, self._wrappers = old
+
+ def _maybe_apply_history(self, method):
+ if self.is_historic():
+ for kwargs, proc in self._call_history:
+ res = self._hookexec(self, [method], kwargs)
+ if res and proc is not None:
+ proc(res[0])
+
+
+class HookImpl:
+ def __init__(self, plugin, plugin_name, function, hook_impl_opts):
+ self.function = function
+ self.argnames = varnames(self.function)
+ self.plugin = plugin
+ self.opts = hook_impl_opts
+ self.plugin_name = plugin_name
+ self.__dict__.update(hook_impl_opts)
+
+
+class PluginValidationError(Exception):
+ """ plugin failed validation. """
+
+
+class HookCallError(Exception):
+ """ Hook was called wrongly. """
+
+
+if hasattr(inspect, 'signature'):
+ def _formatdef(func):
+ return "%s%s" % (
+ func.__name__,
+ str(inspect.signature(func))
+ )
+else:
+ def _formatdef(func):
+ return "%s%s" % (
+ func.__name__,
+ inspect.formatargspec(*inspect.getargspec(func))
+ )
diff --git a/lib/spack/external/distro.py b/lib/spack/external/distro.py
new file mode 100644
index 0000000000..ca25339ec9
--- /dev/null
+++ b/lib/spack/external/distro.py
@@ -0,0 +1,1081 @@
+# Copyright 2015,2016 Nir Cohen
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""
+The ``distro`` package (``distro`` stands for Linux Distribution) provides
+information about the Linux distribution it runs on, such as a reliable
+machine-readable distro ID, or version information.
+
+It is a renewed alternative implementation for Python's original
+:py:func:`platform.linux_distribution` function, but it provides much more
+functionality. An alternative implementation became necessary because Python
+3.5 deprecated this function, and Python 3.7 is expected to remove it
+altogether. Its predecessor function :py:func:`platform.dist` has been
+deprecated since Python 2.6 and is also expected to be removed in Python 3.7.
+Still, there are many cases in which access to Linux distribution information
+is needed. See `Python issue 1322 <https://bugs.python.org/issue1322>`_ for
+more information.
+"""
+
+import os
+import re
+import sys
+import json
+import shlex
+import logging
+import argparse
+import subprocess
+
+
+if not sys.platform.startswith('linux'):
+ raise ImportError('Unsupported platform: {0}'.format(sys.platform))
+
+_UNIXCONFDIR = '/etc'
+_OS_RELEASE_BASENAME = 'os-release'
+
+#: Translation table for normalizing the "ID" attribute defined in os-release
+#: files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as defined in the os-release file, translated to lower case,
+#: with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_OS_ID = {}
+
+#: Translation table for normalizing the "Distributor ID" attribute returned by
+#: the lsb_release command, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as returned by the lsb_release command, translated to lower
+#: case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_LSB_ID = {
+ 'enterpriseenterprise': 'oracle', # Oracle Enterprise Linux
+ 'redhatenterpriseworkstation': 'rhel', # RHEL 6, 7 Workstation
+ 'redhatenterpriseserver': 'rhel', # RHEL 6, 7 Server
+}
+
+#: Translation table for normalizing the distro ID derived from the file name
+#: of distro release files, for use by the :func:`distro.id` method.
+#:
+#: * Key: Value as derived from the file name of a distro release file,
+#: translated to lower case, with blanks translated to underscores.
+#:
+#: * Value: Normalized value.
+NORMALIZED_DISTRO_ID = {
+ 'redhat': 'rhel', # RHEL 6.x, 7.x
+}
+
+# Pattern for content of distro release file (reversed)
+_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
+ r'(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)')
+
+# Pattern for base file name of distro release file
+_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(
+ r'(\w+)[-_](release|version)$')
+
+# Base file names to be ignored when searching for distro release file
+_DISTRO_RELEASE_IGNORE_BASENAMES = (
+ 'debian_version',
+ 'lsb-release',
+ 'oem-release',
+ _OS_RELEASE_BASENAME,
+ 'system-release'
+)
+
+
+def linux_distribution(full_distribution_name=True):
+ """
+ Return information about the current Linux distribution as a tuple
+ ``(id_name, version, codename)`` with items as follows:
+
+ * ``id_name``: If *full_distribution_name* is false, the result of
+ :func:`distro.id`. Otherwise, the result of :func:`distro.name`.
+
+ * ``version``: The result of :func:`distro.version`.
+
+ * ``codename``: The result of :func:`distro.codename`.
+
+ The interface of this function is compatible with the original
+ :py:func:`platform.linux_distribution` function, supporting a subset of
+ its parameters.
+
+ The data it returns may not exactly be the same, because it uses more data
+ sources than the original function, and that may lead to different data if
+ the Linux distribution is not consistent across multiple data sources it
+ provides (there are indeed such distributions ...).
+
+ Another reason for differences is the fact that the :func:`distro.id`
+ method normalizes the distro ID string to a reliable machine-readable value
+ for a number of popular Linux distributions.
+ """
+ return _distro.linux_distribution(full_distribution_name)
+
+
+def id():
+ """
+ Return the distro ID of the current Linux distribution, as a
+ machine-readable string.
+
+ For a number of Linux distributions, the returned distro ID value is
+ *reliable*, in the sense that it is documented and that it does not change
+ across releases of the distribution.
+
+ This package maintains the following reliable distro ID values:
+
+ ============== =========================================
+ Distro ID Distribution
+ ============== =========================================
+ "ubuntu" Ubuntu
+ "debian" Debian
+ "rhel" RedHat Enterprise Linux
+ "centos" CentOS
+ "fedora" Fedora
+ "sles" SUSE Linux Enterprise Server
+ "opensuse" openSUSE
+ "amazon" Amazon Linux
+ "arch" Arch Linux
+ "cloudlinux" CloudLinux OS
+ "exherbo" Exherbo Linux
+ "gentoo" GenToo Linux
+ "ibm_powerkvm" IBM PowerKVM
+ "kvmibm" KVM for IBM z Systems
+ "linuxmint" Linux Mint
+ "mageia" Mageia
+ "mandriva" Mandriva Linux
+ "parallels" Parallels
+ "pidora" Pidora
+ "raspbian" Raspbian
+ "oracle" Oracle Linux (and Oracle Enterprise Linux)
+ "scientific" Scientific Linux
+ "slackware" Slackware
+ "xenserver" XenServer
+ ============== =========================================
+
+ If you need additional distros added to this set of reliable IDs, or if
+ you find that the :func:`distro.id` function returns a different distro ID
+ for one of the listed distros, please create an issue in the
+ `distro issue tracker`_.
+
+ **Lookup hierarchy and transformations:**
+
+ First, the ID is obtained from the following sources, in the specified
+ order. The first available and non-empty value is used:
+
+ * the value of the "ID" attribute of the os-release file,
+
+ * the value of the "Distributor ID" attribute returned by the lsb_release
+ command,
+
+ * the first part of the file name of the distro release file.
+
+ The ID value determined this way then passes through the following
+ transformations before it is returned by this method:
+
+ * it is translated to lower case,
+
+ * blanks (which should not be there anyway) are translated to underscores,
+
+ * a normalization of the ID is performed, based upon
+ `normalization tables`_. The purpose of this normalization is to ensure
+ that the ID is as reliable as possible, even across incompatible changes
+ in the Linux distributions. A common reason for an incompatible change is
+ the addition of an os-release file, or the addition of the lsb_release
+ command, with ID values that differ from what was previously determined
+ from the distro release file name.
+ """
+ return _distro.id()
+
+
+def name(pretty=False):
+ """
+ Return the name of the current Linux distribution, as a human-readable
+ string.
+
+ If *pretty* is false, the name is returned without version or codename.
+ (e.g. "CentOS Linux")
+
+ If *pretty* is true, the version and codename are appended.
+ (e.g. "CentOS Linux 7.1.1503 (Core)")
+
+ **Lookup hierarchy:**
+
+ The name is obtained from the following sources, in the specified order.
+ The first available and non-empty value is used:
+
+ * If *pretty* is false:
+
+ - the value of the "NAME" attribute of the os-release file,
+
+ - the value of the "Distributor ID" attribute returned by the lsb_release
+ command,
+
+ - the value of the "<name>" field of the distro release file.
+
+ * If *pretty* is true:
+
+ - the value of the "PRETTY_NAME" attribute of the os-release file,
+
+ - the value of the "Description" attribute returned by the lsb_release
+ command,
+
+ - the value of the "<name>" field of the distro release file, appended
+ with the value of the pretty version ("<version_id>" and "<codename>"
+ fields) of the distro release file, if available.
+ """
+ return _distro.name(pretty)
+
+
+def version(pretty=False, best=False):
+ """
+ Return the version of the current Linux distribution, as a human-readable
+ string.
+
+ If *pretty* is false, the version is returned without codename (e.g.
+ "7.0").
+
+ If *pretty* is true, the codename in parenthesis is appended, if the
+ codename is non-empty (e.g. "7.0 (Maipo)").
+
+ Some distributions provide version numbers with different precisions in
+ the different sources of distribution information. Examining the different
+ sources in a fixed priority order does not always yield the most precise
+ version (e.g. for Debian 8.2, or CentOS 7.1).
+
+ The *best* parameter can be used to control the approach for the returned
+ version:
+
+ If *best* is false, the first non-empty version number in priority order of
+ the examined sources is returned.
+
+ If *best* is true, the most precise version number out of all examined
+ sources is returned.
+
+ **Lookup hierarchy:**
+
+ In all cases, the version number is obtained from the following sources.
+ If *best* is false, this order represents the priority order:
+
+ * the value of the "VERSION_ID" attribute of the os-release file,
+ * the value of the "Release" attribute returned by the lsb_release
+ command,
+ * the version number parsed from the "<version_id>" field of the first line
+ of the distro release file,
+ * the version number parsed from the "PRETTY_NAME" attribute of the
+ os-release file, if it follows the format of the distro release files.
+ * the version number parsed from the "Description" attribute returned by
+ the lsb_release command, if it follows the format of the distro release
+ files.
+ """
+ return _distro.version(pretty, best)
+
+
+def version_parts(best=False):
+ """
+ Return the version of the current Linux distribution as a tuple
+ ``(major, minor, build_number)`` with items as follows:
+
+ * ``major``: The result of :func:`distro.major_version`.
+
+ * ``minor``: The result of :func:`distro.minor_version`.
+
+ * ``build_number``: The result of :func:`distro.build_number`.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.version_parts(best)
+
+
+def major_version(best=False):
+ """
+ Return the major version of the current Linux distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The major version is the first
+ part of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.major_version(best)
+
+
+def minor_version(best=False):
+ """
+ Return the minor version of the current Linux distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The minor version is the second
+ part of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.minor_version(best)
+
+
+def build_number(best=False):
+ """
+ Return the build number of the current Linux distribution, as a string,
+ if provided.
+ Otherwise, the empty string is returned. The build number is the third part
+ of the dot-separated version string.
+
+ For a description of the *best* parameter, see the :func:`distro.version`
+ method.
+ """
+ return _distro.build_number(best)
+
+
+def like():
+ """
+ Return a space-separated list of distro IDs of distributions that are
+ closely related to the current Linux distribution with regard to packaging
+ and programming interfaces, for example distributions from which the
+ current distribution is derived.
+
+ **Lookup hierarchy:**
+
+ This information item is only provided by the os-release file.
+ For details, see the description of the "ID_LIKE" attribute in the
+ `os-release man page
+ <http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
+ """
+ return _distro.like()
+
+
+def codename():
+ """
+ Return the codename for the release of the current Linux distribution,
+ as a string.
+
+ If the distribution does not have a codename, an empty string is returned.
+
+ Note that the returned codename is not always really a codename. For
+ example, openSUSE returns "x86_64". This function does not handle such
+ cases in any special way and just returns the string it finds, if any.
+
+ **Lookup hierarchy:**
+
+ * the codename within the "VERSION" attribute of the os-release file, if
+ provided,
+
+ * the value of the "Codename" attribute returned by the lsb_release
+ command,
+
+ * the value of the "<codename>" field of the distro release file.
+ """
+ return _distro.codename()
+
+
+def info(pretty=False, best=False):
+ """
+ Return certain machine-readable information items about the current Linux
+ distribution in a dictionary, as shown in the following example:
+
+ .. sourcecode:: python
+
+ {
+ 'id': 'rhel',
+ 'version': '7.0',
+ 'version_parts': {
+ 'major': '7',
+ 'minor': '0',
+ 'build_number': ''
+ },
+ 'like': 'fedora',
+ 'codename': 'Maipo'
+ }
+
+ The dictionary structure and keys are always the same, regardless of which
+ information items are available in the underlying data sources. The values
+ for the various keys are as follows:
+
+ * ``id``: The result of :func:`distro.id`.
+
+ * ``version``: The result of :func:`distro.version`.
+
+ * ``version_parts -> major``: The result of :func:`distro.major_version`.
+
+ * ``version_parts -> minor``: The result of :func:`distro.minor_version`.
+
+ * ``version_parts -> build_number``: The result of
+ :func:`distro.build_number`.
+
+ * ``like``: The result of :func:`distro.like`.
+
+ * ``codename``: The result of :func:`distro.codename`.
+
+ For a description of the *pretty* and *best* parameters, see the
+ :func:`distro.version` method.
+ """
+ return _distro.info(pretty, best)
+
+
+def os_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the os-release file data source of the current Linux distribution.
+
+ See `os-release file`_ for details about these information items.
+ """
+ return _distro.os_release_info()
+
+
+def lsb_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the lsb_release command data source of the current Linux distribution.
+
+ See `lsb_release command output`_ for details about these information
+ items.
+ """
+ return _distro.lsb_release_info()
+
+
+def distro_release_info():
+ """
+ Return a dictionary containing key-value pairs for the information items
+ from the distro release file data source of the current Linux distribution.
+
+ See `distro release file`_ for details about these information items.
+ """
+ return _distro.distro_release_info()
+
+
+def os_release_attr(attribute):
+ """
+ Return a single named information item from the os-release file data source
+ of the current Linux distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `os-release file`_ for details about these information items.
+ """
+ return _distro.os_release_attr(attribute)
+
+
+def lsb_release_attr(attribute):
+ """
+ Return a single named information item from the lsb_release command output
+ data source of the current Linux distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `lsb_release command output`_ for details about these information
+ items.
+ """
+ return _distro.lsb_release_attr(attribute)
+
+
+def distro_release_attr(attribute):
+ """
+ Return a single named information item from the distro release file
+ data source of the current Linux distribution.
+
+ Parameters:
+
+ * ``attribute`` (string): Key of the information item.
+
+ Returns:
+
+ * (string): Value of the information item, if the item exists.
+ The empty string, if the item does not exist.
+
+ See `distro release file`_ for details about these information items.
+ """
+ return _distro.distro_release_attr(attribute)
+
+
+class LinuxDistribution(object):
+ """
+ Provides information about a Linux distribution.
+
+ This package creates a private module-global instance of this class with
+ default initialization arguments, which is used by the
+ `consolidated accessor functions`_ and `single source accessor functions`_.
+ By using default initialization arguments, that module-global instance
+ returns data about the current Linux distribution (i.e. the distro this
+ package runs on).
+
+ Normally, it is not necessary to create additional instances of this class.
+ However, in situations where control is needed over the exact data sources
+ that are used, instances of this class can be created with a specific
+ distro release file, or a specific os-release file, or without invoking the
+ lsb_release command.
+ """
+
+ def __init__(self,
+ include_lsb=True,
+ os_release_file='',
+ distro_release_file=''):
+ """
+ The initialization method of this class gathers information from the
+ available data sources, and stores that in private instance attributes.
+ Subsequent access to the information items uses these private instance
+ attributes, so that the data sources are read only once.
+
+ Parameters:
+
+ * ``include_lsb`` (bool): Controls whether the
+ `lsb_release command output`_ is included as a data source.
+
+ If the lsb_release command is not available in the program execution
+ path, the data source for the lsb_release command will be empty.
+
+ * ``os_release_file`` (string): The path name of the
+ `os-release file`_ that is to be used as a data source.
+
+ An empty string (the default) will cause the default path name to
+ be used (see `os-release file`_ for details).
+
+ If the specified or defaulted os-release file does not exist, the
+ data source for the os-release file will be empty.
+
+ * ``distro_release_file`` (string): The path name of the
+ `distro release file`_ that is to be used as a data source.
+
+ An empty string (the default) will cause a default search algorithm
+ to be used (see `distro release file`_ for details).
+
+ If the specified distro release file does not exist, or if no default
+ distro release file can be found, the data source for the distro
+ release file will be empty.
+
+ Public instance attributes:
+
+ * ``os_release_file`` (string): The path name of the
+ `os-release file`_ that is actually used as a data source. The
+ empty string if no os-release file is used as a data source.
+
+ * ``distro_release_file`` (string): The path name of the
+ `distro release file`_ that is actually used as a data source. The
+ empty string if no distro release file is used as a data source.
+
+ Raises:
+
+ * :py:exc:`IOError`: Some I/O issue with an os-release file or distro
+ release file.
+
+ * :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
+ some issue (other than not being available in the program execution
+ path).
+
+ * :py:exc:`UnicodeError`: A data source has unexpected characters or
+ uses an unexpected encoding.
+ """
+ self.os_release_file = os_release_file or \
+ os.path.join(_UNIXCONFDIR, _OS_RELEASE_BASENAME)
+ self.distro_release_file = distro_release_file or '' # updated later
+ self._os_release_info = self._get_os_release_info()
+ self._lsb_release_info = self._get_lsb_release_info() \
+ if include_lsb else {}
+ self._distro_release_info = self._get_distro_release_info()
+
+ def __repr__(self):
+ """Return repr of all info
+ """
+ return \
+ "LinuxDistribution(" \
+ "os_release_file={0!r}, " \
+ "distro_release_file={1!r}, " \
+ "_os_release_info={2!r}, " \
+ "_lsb_release_info={3!r}, " \
+ "_distro_release_info={4!r})".format(
+ self.os_release_file,
+ self.distro_release_file,
+ self._os_release_info,
+ self._lsb_release_info,
+ self._distro_release_info)
+
+ def linux_distribution(self, full_distribution_name=True):
+ """
+ Return information about the Linux distribution that is compatible
+ with Python's :func:`platform.linux_distribution`, supporting a subset
+ of its parameters.
+
+ For details, see :func:`distro.linux_distribution`.
+ """
+ return (
+ self.name() if full_distribution_name else self.id(),
+ self.version(),
+ self.codename()
+ )
+
+ def id(self):
+ """Return the distro ID of the Linux distribution, as a string.
+
+ For details, see :func:`distro.id`.
+ """
+ def normalize(distro_id, table):
+ distro_id = distro_id.lower().replace(' ', '_')
+ return table.get(distro_id, distro_id)
+
+ distro_id = self.os_release_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_OS_ID)
+
+ distro_id = self.lsb_release_attr('distributor_id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_LSB_ID)
+
+ distro_id = self.distro_release_attr('id')
+ if distro_id:
+ return normalize(distro_id, NORMALIZED_DISTRO_ID)
+
+ return ''
+
+ def name(self, pretty=False):
+ """
+ Return the name of the Linux distribution, as a string.
+
+ For details, see :func:`distro.name`.
+ """
+ name = self.os_release_attr('name') \
+ or self.lsb_release_attr('distributor_id') \
+ or self.distro_release_attr('name')
+ if pretty:
+ name = self.os_release_attr('pretty_name') \
+ or self.lsb_release_attr('description')
+ if not name:
+ name = self.distro_release_attr('name')
+ version = self.version(pretty=True)
+ if version:
+ name = name + ' ' + version
+ return name or ''
+
+ def version(self, pretty=False, best=False):
+ """
+ Return the version of the Linux distribution, as a string.
+
+ For details, see :func:`distro.version`.
+ """
+ versions = [
+ self.os_release_attr('version_id'),
+ self.lsb_release_attr('release'),
+ self.distro_release_attr('version_id'),
+ self._parse_distro_release_content(
+ self.os_release_attr('pretty_name')).get('version_id', ''),
+ self._parse_distro_release_content(
+ self.lsb_release_attr('description')).get('version_id', '')
+ ]
+ version = ''
+ if best:
+ # This algorithm uses the last version in priority order that has
+ # the best precision. If the versions are not in conflict, that
+ # does not matter; otherwise, using the last one instead of the
+ # first one might be considered a surprise.
+ for v in versions:
+ if v.count(".") > version.count(".") or version == '':
+ version = v
+ else:
+ for v in versions:
+ if v != '':
+ version = v
+ break
+ if pretty and version and self.codename():
+ version = u'{0} ({1})'.format(version, self.codename())
+ return version
+
+ def version_parts(self, best=False):
+ """
+ Return the version of the Linux distribution, as a tuple of version
+ numbers.
+
+ For details, see :func:`distro.version_parts`.
+ """
+ version_str = self.version(best=best)
+ if version_str:
+ version_regex = re.compile(r'(\d+)\.?(\d+)?\.?(\d+)?')
+ matches = version_regex.match(version_str)
+ if matches:
+ major, minor, build_number = matches.groups()
+ return major, minor or '', build_number or ''
+ return '', '', ''
+
+ def major_version(self, best=False):
+ """
+ Return the major version number of the current distribution.
+
+ For details, see :func:`distro.major_version`.
+ """
+ return self.version_parts(best)[0]
+
+ def minor_version(self, best=False):
+ """
+ Return the minor version number of the Linux distribution.
+
+ For details, see :func:`distro.minor_version`.
+ """
+ return self.version_parts(best)[1]
+
+ def build_number(self, best=False):
+ """
+ Return the build number of the Linux distribution.
+
+ For details, see :func:`distro.build_number`.
+ """
+ return self.version_parts(best)[2]
+
+ def like(self):
+ """
+ Return the IDs of distributions that are like the Linux distribution.
+
+ For details, see :func:`distro.like`.
+ """
+ return self.os_release_attr('id_like') or ''
+
+ def codename(self):
+ """
+ Return the codename of the Linux distribution.
+
+ For details, see :func:`distro.codename`.
+ """
+ return self.os_release_attr('codename') \
+ or self.lsb_release_attr('codename') \
+ or self.distro_release_attr('codename') \
+ or ''
+
+ def info(self, pretty=False, best=False):
+ """
+ Return certain machine-readable information about the Linux
+ distribution.
+
+ For details, see :func:`distro.info`.
+ """
+ return dict(
+ id=self.id(),
+ version=self.version(pretty, best),
+ version_parts=dict(
+ major=self.major_version(best),
+ minor=self.minor_version(best),
+ build_number=self.build_number(best)
+ ),
+ like=self.like(),
+ codename=self.codename(),
+ )
+
+ def os_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the os-release file data source of the Linux distribution.
+
+ For details, see :func:`distro.os_release_info`.
+ """
+ return self._os_release_info
+
+ def lsb_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the lsb_release command data source of the Linux
+ distribution.
+
+ For details, see :func:`distro.lsb_release_info`.
+ """
+ return self._lsb_release_info
+
+ def distro_release_info(self):
+ """
+ Return a dictionary containing key-value pairs for the information
+ items from the distro release file data source of the Linux
+ distribution.
+
+ For details, see :func:`distro.distro_release_info`.
+ """
+ return self._distro_release_info
+
+ def os_release_attr(self, attribute):
+ """
+ Return a single named information item from the os-release file data
+ source of the Linux distribution.
+
+ For details, see :func:`distro.os_release_attr`.
+ """
+ return self._os_release_info.get(attribute, '')
+
+ def lsb_release_attr(self, attribute):
+ """
+ Return a single named information item from the lsb_release command
+ output data source of the Linux distribution.
+
+ For details, see :func:`distro.lsb_release_attr`.
+ """
+ return self._lsb_release_info.get(attribute, '')
+
+ def distro_release_attr(self, attribute):
+ """
+ Return a single named information item from the distro release file
+ data source of the Linux distribution.
+
+ For details, see :func:`distro.distro_release_attr`.
+ """
+ return self._distro_release_info.get(attribute, '')
+
+ def _get_os_release_info(self):
+ """
+ Get the information items from the specified os-release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if os.path.isfile(self.os_release_file):
+ with open(self.os_release_file) as release_file:
+ return self._parse_os_release_content(release_file)
+ return {}
+
+ @staticmethod
+ def _parse_os_release_content(lines):
+ """
+ Parse the lines of an os-release file.
+
+ Parameters:
+
+ * lines: Iterable through the lines in the os-release file.
+ Each line must be a unicode string or a UTF-8 encoded byte
+ string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ props = {}
+ lexer = shlex.shlex(lines, posix=True)
+ lexer.whitespace_split = True
+
+ # The shlex module defines its `wordchars` variable using literals,
+ # making it dependent on the encoding of the Python source file.
+ # In Python 2.6 and 2.7, the shlex source file is encoded in
+ # 'iso-8859-1', and the `wordchars` variable is defined as a byte
+ # string. This causes a UnicodeDecodeError to be raised when the
+ # parsed content is a unicode object. The following fix resolves that
+ # (... but it should be fixed in shlex...):
+ if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
+ lexer.wordchars = lexer.wordchars.decode('iso-8859-1')
+
+ tokens = list(lexer)
+ for token in tokens:
+ # At this point, all shell-like parsing has been done (i.e.
+ # comments processed, quotes and backslash escape sequences
+ # processed, multi-line values assembled, trailing newlines
+ # stripped, etc.), so the tokens are now either:
+ # * variable assignments: var=value
+ # * commands or their arguments (not allowed in os-release)
+ if '=' in token:
+ k, v = token.split('=', 1)
+ if isinstance(v, bytes):
+ v = v.decode('utf-8')
+ props[k.lower()] = v
+ if k == 'VERSION':
+ # this handles cases in which the codename is in
+ # the `(CODENAME)` (rhel, centos, fedora) format
+ # or in the `, CODENAME` format (Ubuntu).
+ codename = re.search(r'(\(\D+\))|,(\s+)?\D+', v)
+ if codename:
+ codename = codename.group()
+ codename = codename.strip('()')
+ codename = codename.strip(',')
+ codename = codename.strip()
+ # codename appears within parentheses.
+ props['codename'] = codename
+ else:
+ props['codename'] = ''
+ else:
+ # Ignore any tokens that are not variable assignments
+ pass
+ return props
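+
+ # Illustrative example (a hypothetical single-line os-release input):
+ #     _parse_os_release_content('PRETTY_NAME="Ubuntu 16.04 LTS"\n')
+ #     returns {'pretty_name': 'Ubuntu 16.04 LTS'}; shlex strips the quotes
+ #     and keys are lower-cased before being stored.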
+
+ def _get_lsb_release_info(self):
+ """
+ Get the information items from the lsb_release command output.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ cmd = 'lsb_release -a'
+ process = subprocess.Popen(
+ cmd,
+ shell=True,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE)
+ stdout, stderr = process.communicate()
+ stdout, stderr = stdout.decode('utf-8'), stderr.decode('utf-8')
+ code = process.returncode
+ if code == 0:
+ content = stdout.splitlines()
+ return self._parse_lsb_release_content(content)
+ elif code == 127: # Command not found
+ return {}
+ else:
+ if sys.version_info[:2] >= (3, 5):
+ raise subprocess.CalledProcessError(code, cmd, stdout, stderr)
+ elif sys.version_info[:2] >= (2, 7):
+ raise subprocess.CalledProcessError(code, cmd, stdout)
+ elif sys.version_info[:2] == (2, 6):
+ raise subprocess.CalledProcessError(code, cmd)
+
+ @staticmethod
+ def _parse_lsb_release_content(lines):
+ """
+ Parse the output of the lsb_release command.
+
+ Parameters:
+
+ * lines: Iterable through the lines of the lsb_release output.
+ Each line must be a unicode string or a UTF-8 encoded byte
+ string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ props = {}
+ for line in lines:
+ line = line.decode('utf-8') if isinstance(line, bytes) else line
+ kv = line.strip('\n').split(':', 1)
+ if len(kv) != 2:
+ # Ignore lines without colon.
+ continue
+ k, v = kv
+ props.update({k.replace(' ', '_').lower(): v.strip()})
+ return props
+
+ def _get_distro_release_info(self):
+ """
+ Get the information items from the specified distro release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if self.distro_release_file:
+ # If it was specified, we use it and parse what we can, even if
+ # its file name or content does not match the expected pattern.
+ distro_info = self._parse_distro_release_file(
+ self.distro_release_file)
+ basename = os.path.basename(self.distro_release_file)
+ # The file name pattern for user-specified distro release files
+ # is somewhat more tolerant (compared to when searching for the
+ # file), because we want to use what was specified as best as
+ # possible.
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ if match:
+ distro_info['id'] = match.group(1)
+ return distro_info
+ else:
+ basenames = os.listdir(_UNIXCONFDIR)
+ # We sort for repeatability in cases where there are multiple
+ # distro-specific files; e.g. CentOS, Oracle, and Enterprise Linux all
+ # ship `redhat-release` on top of their own release file.
+ basenames.sort()
+ for basename in basenames:
+ if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
+ continue
+ match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
+ if match:
+ filepath = os.path.join(_UNIXCONFDIR, basename)
+ distro_info = self._parse_distro_release_file(filepath)
+ if 'name' in distro_info:
+ # The name is always present if the pattern matches
+ self.distro_release_file = filepath
+ distro_info['id'] = match.group(1)
+ return distro_info
+ return {}
+
+ def _parse_distro_release_file(self, filepath):
+ """
+ Parse a distro release file.
+
+ Parameters:
+
+ * filepath: Path name of the distro release file.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if os.path.isfile(filepath):
+ with open(filepath) as fp:
+ # Only parse the first line. For instance, on SLES there
+ # are multiple lines. We don't want them...
+ return self._parse_distro_release_content(fp.readline())
+ return {}
+
+ @staticmethod
+ def _parse_distro_release_content(line):
+ """
+ Parse a line from a distro release file.
+
+ Parameters:
+ * line: Line from the distro release file. Must be a unicode string
+ or a UTF-8 encoded byte string.
+
+ Returns:
+ A dictionary containing all information items.
+ """
+ if isinstance(line, bytes):
+ line = line.decode('utf-8')
+ matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(
+ line.strip()[::-1])
+ distro_info = {}
+ if matches:
+ # regexp ensures non-None
+ distro_info['name'] = matches.group(3)[::-1]
+ if matches.group(2):
+ distro_info['version_id'] = matches.group(2)[::-1]
+ if matches.group(1):
+ distro_info['codename'] = matches.group(1)[::-1]
+ elif line:
+ distro_info['name'] = line.strip()
+ return distro_info
+
+
+_distro = LinuxDistribution()
+
+
+def main():
+ logger = logging.getLogger(__name__)
+ logger.setLevel(logging.DEBUG)
+ logger.addHandler(logging.StreamHandler(sys.stdout))
+
+ parser = argparse.ArgumentParser(description="Linux distro info tool")
+ parser.add_argument(
+ '--json',
+ '-j',
+ help="Output in machine readable format",
+ action="store_true")
+ args = parser.parse_args()
+
+ if args.json:
+ logger.info(json.dumps(info(), indent=4, sort_keys=True))
+ else:
+ logger.info('Name: %s', name(pretty=True))
+ distribution_version = version(pretty=True)
+ if distribution_version:
+ logger.info('Version: %s', distribution_version)
+ distribution_codename = codename()
+ if distribution_codename:
+ logger.info('Codename: %s', distribution_codename)
+
+
+if __name__ == '__main__':
+ main()
diff --git a/lib/spack/external/nose/LICENSE b/lib/spack/external/nose/LICENSE
deleted file mode 100644
index 9f6e791624..0000000000
--- a/lib/spack/external/nose/LICENSE
+++ /dev/null
@@ -1,502 +0,0 @@
- GNU LESSER GENERAL PUBLIC LICENSE
- Version 2.1, February 1999
-
- Copyright (C) 1991, 1999 Free Software Foundation, Inc.
- 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
- Everyone is permitted to copy and distribute verbatim copies
- of this license document, but changing it is not allowed.
-
-[This is the first released version of the Lesser GPL. It also counts
- as the successor of the GNU Library Public License, version 2, hence
- the version number 2.1.]
-
- Preamble
-
- The licenses for most software are designed to take away your
-freedom to share and change it. By contrast, the GNU General Public
-Licenses are intended to guarantee your freedom to share and change
-free software--to make sure the software is free for all its users.
-
- This license, the Lesser General Public License, applies to some
-specially designated software packages--typically libraries--of the
-Free Software Foundation and other authors who decide to use it. You
-can use it too, but we suggest you first think carefully about whether
-this license or the ordinary General Public License is the better
-strategy to use in any particular case, based on the explanations below.
-
- When we speak of free software, we are referring to freedom of use,
-not price. Our General Public Licenses are designed to make sure that
-you have the freedom to distribute copies of free software (and charge
-for this service if you wish); that you receive source code or can get
-it if you want it; that you can change the software and use pieces of
-it in new free programs; and that you are informed that you can do
-these things.
-
- To protect your rights, we need to make restrictions that forbid
-distributors to deny you these rights or to ask you to surrender these
-rights. These restrictions translate to certain responsibilities for
-you if you distribute copies of the library or if you modify it.
-
- For example, if you distribute copies of the library, whether gratis
-or for a fee, you must give the recipients all the rights that we gave
-you. You must make sure that they, too, receive or can get the source
-code. If you link other code with the library, you must provide
-complete object files to the recipients, so that they can relink them
-with the library after making changes to the library and recompiling
-it. And you must show them these terms so they know their rights.
-
- We protect your rights with a two-step method: (1) we copyright the
-library, and (2) we offer you this license, which gives you legal
-permission to copy, distribute and/or modify the library.
-
- To protect each distributor, we want to make it very clear that
-there is no warranty for the free library. Also, if the library is
-modified by someone else and passed on, the recipients should know
-that what they have is not the original version, so that the original
-author's reputation will not be affected by problems that might be
-introduced by others.
-
- Finally, software patents pose a constant threat to the existence of
-any free program. We wish to make sure that a company cannot
-effectively restrict the users of a free program by obtaining a
-restrictive license from a patent holder. Therefore, we insist that
-any patent license obtained for a version of the library must be
-consistent with the full freedom of use specified in this license.
-
- Most GNU software, including some libraries, is covered by the
-ordinary GNU General Public License. This license, the GNU Lesser
-General Public License, applies to certain designated libraries, and
-is quite different from the ordinary General Public License. We use
-this license for certain libraries in order to permit linking those
-libraries into non-free programs.
-
- When a program is linked with a library, whether statically or using
-a shared library, the combination of the two is legally speaking a
-combined work, a derivative of the original library. The ordinary
-General Public License therefore permits such linking only if the
-entire combination fits its criteria of freedom. The Lesser General
-Public License permits more lax criteria for linking other code with
-the library.
-
- We call this license the "Lesser" General Public License because it
-does Less to protect the user's freedom than the ordinary General
-Public License. It also provides other free software developers Less
-of an advantage over competing non-free programs. These disadvantages
-are the reason we use the ordinary General Public License for many
-libraries. However, the Lesser license provides advantages in certain
-special circumstances.
-
- For example, on rare occasions, there may be a special need to
-encourage the widest possible use of a certain library, so that it becomes
-a de-facto standard. To achieve this, non-free programs must be
-allowed to use the library. A more frequent case is that a free
-library does the same job as widely used non-free libraries. In this
-case, there is little to gain by limiting the free library to free
-software only, so we use the Lesser General Public License.
-
- In other cases, permission to use a particular library in non-free
-programs enables a greater number of people to use a large body of
-free software. For example, permission to use the GNU C Library in
-non-free programs enables many more people to use the whole GNU
-operating system, as well as its variant, the GNU/Linux operating
-system.
-
- Although the Lesser General Public License is Less protective of the
-users' freedom, it does ensure that the user of a program that is
-linked with the Library has the freedom and the wherewithal to run
-that program using a modified version of the Library.
-
- The precise terms and conditions for copying, distribution and
-modification follow. Pay close attention to the difference between a
-"work based on the library" and a "work that uses the library". The
-former contains code derived from the library, whereas the latter must
-be combined with the library in order to run.
-
- GNU LESSER GENERAL PUBLIC LICENSE
- TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
-
- 0. This License Agreement applies to any software library or other
-program which contains a notice placed by the copyright holder or
-other authorized party saying it may be distributed under the terms of
-this Lesser General Public License (also called "this License").
-Each licensee is addressed as "you".
-
- A "library" means a collection of software functions and/or data
-prepared so as to be conveniently linked with application programs
-(which use some of those functions and data) to form executables.
-
- The "Library", below, refers to any such software library or work
-which has been distributed under these terms. A "work based on the
-Library" means either the Library or any derivative work under
-copyright law: that is to say, a work containing the Library or a
-portion of it, either verbatim or with modifications and/or translated
-straightforwardly into another language. (Hereinafter, translation is
-included without limitation in the term "modification".)
-
- "Source code" for a work means the preferred form of the work for
-making modifications to it. For a library, complete source code means
-all the source code for all modules it contains, plus any associated
-interface definition files, plus the scripts used to control compilation
-and installation of the library.
-
- Activities other than copying, distribution and modification are not
-covered by this License; they are outside its scope. The act of
-running a program using the Library is not restricted, and output from
-such a program is covered only if its contents constitute a work based
-on the Library (independent of the use of the Library in a tool for
-writing it). Whether that is true depends on what the Library does
-and what the program that uses the Library does.
-
- 1. You may copy and distribute verbatim copies of the Library's
-complete source code as you receive it, in any medium, provided that
-you conspicuously and appropriately publish on each copy an
-appropriate copyright notice and disclaimer of warranty; keep intact
-all the notices that refer to this License and to the absence of any
-warranty; and distribute a copy of this License along with the
-Library.
-
- You may charge a fee for the physical act of transferring a copy,
-and you may at your option offer warranty protection in exchange for a
-fee.
-
- 2. You may modify your copy or copies of the Library or any portion
-of it, thus forming a work based on the Library, and copy and
-distribute such modifications or work under the terms of Section 1
-above, provided that you also meet all of these conditions:
-
- a) The modified work must itself be a software library.
-
- b) You must cause the files modified to carry prominent notices
- stating that you changed the files and the date of any change.
-
- c) You must cause the whole of the work to be licensed at no
- charge to all third parties under the terms of this License.
-
- d) If a facility in the modified Library refers to a function or a
- table of data to be supplied by an application program that uses
- the facility, other than as an argument passed when the facility
- is invoked, then you must make a good faith effort to ensure that,
- in the event an application does not supply such function or
- table, the facility still operates, and performs whatever part of
- its purpose remains meaningful.
-
- (For example, a function in a library to compute square roots has
- a purpose that is entirely well-defined independent of the
- application. Therefore, Subsection 2d requires that any
- application-supplied function or table used by this function must
- be optional: if the application does not supply it, the square
- root function must still compute square roots.)
-
-These requirements apply to the modified work as a whole. If
-identifiable sections of that work are not derived from the Library,
-and can be reasonably considered independent and separate works in
-themselves, then this License, and its terms, do not apply to those
-sections when you distribute them as separate works. But when you
-distribute the same sections as part of a whole which is a work based
-on the Library, the distribution of the whole must be on the terms of
-this License, whose permissions for other licensees extend to the
-entire whole, and thus to each and every part regardless of who wrote
-it.
-
-Thus, it is not the intent of this section to claim rights or contest
-your rights to work written entirely by you; rather, the intent is to
-exercise the right to control the distribution of derivative or
-collective works based on the Library.
-
-In addition, mere aggregation of another work not based on the Library
-with the Library (or with a work based on the Library) on a volume of
-a storage or distribution medium does not bring the other work under
-the scope of this License.
-
- 3. You may opt to apply the terms of the ordinary GNU General Public
-License instead of this License to a given copy of the Library. To do
-this, you must alter all the notices that refer to this License, so
-that they refer to the ordinary GNU General Public License, version 2,
-instead of to this License. (If a newer version than version 2 of the
-ordinary GNU General Public License has appeared, then you can specify
-that version instead if you wish.) Do not make any other change in
-these notices.
-
- Once this change is made in a given copy, it is irreversible for
-that copy, so the ordinary GNU General Public License applies to all
-subsequent copies and derivative works made from that copy.
-
- This option is useful when you wish to copy part of the code of
-the Library into a program that is not a library.
-
- 4. You may copy and distribute the Library (or a portion or
-derivative of it, under Section 2) in object code or executable form
-under the terms of Sections 1 and 2 above provided that you accompany
-it with the complete corresponding machine-readable source code, which
-must be distributed under the terms of Sections 1 and 2 above on a
-medium customarily used for software interchange.
-
- If distribution of object code is made by offering access to copy
-from a designated place, then offering equivalent access to copy the
-source code from the same place satisfies the requirement to
-distribute the source code, even though third parties are not
-compelled to copy the source along with the object code.
-
- 5. A program that contains no derivative of any portion of the
-Library, but is designed to work with the Library by being compiled or
-linked with it, is called a "work that uses the Library". Such a
-work, in isolation, is not a derivative work of the Library, and
-therefore falls outside the scope of this License.
-
- However, linking a "work that uses the Library" with the Library
-creates an executable that is a derivative of the Library (because it
-contains portions of the Library), rather than a "work that uses the
-library". The executable is therefore covered by this License.
-Section 6 states terms for distribution of such executables.
-
- When a "work that uses the Library" uses material from a header file
-that is part of the Library, the object code for the work may be a
-derivative work of the Library even though the source code is not.
-Whether this is true is especially significant if the work can be
-linked without the Library, or if the work is itself a library. The
-threshold for this to be true is not precisely defined by law.
-
- If such an object file uses only numerical parameters, data
-structure layouts and accessors, and small macros and small inline
-functions (ten lines or less in length), then the use of the object
-file is unrestricted, regardless of whether it is legally a derivative
-work. (Executables containing this object code plus portions of the
-Library will still fall under Section 6.)
-
- Otherwise, if the work is a derivative of the Library, you may
-distribute the object code for the work under the terms of Section 6.
-Any executables containing that work also fall under Section 6,
-whether or not they are linked directly with the Library itself.
-
- 6. As an exception to the Sections above, you may also combine or
-link a "work that uses the Library" with the Library to produce a
-work containing portions of the Library, and distribute that work
-under terms of your choice, provided that the terms permit
-modification of the work for the customer's own use and reverse
-engineering for debugging such modifications.
-
- You must give prominent notice with each copy of the work that the
-Library is used in it and that the Library and its use are covered by
-this License. You must supply a copy of this License. If the work
-during execution displays copyright notices, you must include the
-copyright notice for the Library among them, as well as a reference
-directing the user to the copy of this License. Also, you must do one
-of these things:
-
- a) Accompany the work with the complete corresponding
- machine-readable source code for the Library including whatever
- changes were used in the work (which must be distributed under
- Sections 1 and 2 above); and, if the work is an executable linked
- with the Library, with the complete machine-readable "work that
- uses the Library", as object code and/or source code, so that the
- user can modify the Library and then relink to produce a modified
- executable containing the modified Library. (It is understood
- that the user who changes the contents of definitions files in the
- Library will not necessarily be able to recompile the application
- to use the modified definitions.)
-
- b) Use a suitable shared library mechanism for linking with the
- Library. A suitable mechanism is one that (1) uses at run time a
- copy of the library already present on the user's computer system,
- rather than copying library functions into the executable, and (2)
- will operate properly with a modified version of the library, if
- the user installs one, as long as the modified version is
- interface-compatible with the version that the work was made with.
-
- c) Accompany the work with a written offer, valid for at
- least three years, to give the same user the materials
- specified in Subsection 6a, above, for a charge no more
- than the cost of performing this distribution.
-
- d) If distribution of the work is made by offering access to copy
- from a designated place, offer equivalent access to copy the above
- specified materials from the same place.
-
- e) Verify that the user has already received a copy of these
- materials or that you have already sent this user a copy.
-
- For an executable, the required form of the "work that uses the
-Library" must include any data and utility programs needed for
-reproducing the executable from it. However, as a special exception,
-the materials to be distributed need not include anything that is
-normally distributed (in either source or binary form) with the major
-components (compiler, kernel, and so on) of the operating system on
-which the executable runs, unless that component itself accompanies
-the executable.
-
- It may happen that this requirement contradicts the license
-restrictions of other proprietary libraries that do not normally
-accompany the operating system. Such a contradiction means you cannot
-use both them and the Library together in an executable that you
-distribute.
-
- 7. You may place library facilities that are a work based on the
-Library side-by-side in a single library together with other library
-facilities not covered by this License, and distribute such a combined
-library, provided that the separate distribution of the work based on
-the Library and of the other library facilities is otherwise
-permitted, and provided that you do these two things:
-
- a) Accompany the combined library with a copy of the same work
- based on the Library, uncombined with any other library
- facilities. This must be distributed under the terms of the
- Sections above.
-
- b) Give prominent notice with the combined library of the fact
- that part of it is a work based on the Library, and explaining
- where to find the accompanying uncombined form of the same work.
-
- 8. You may not copy, modify, sublicense, link with, or distribute
-the Library except as expressly provided under this License. Any
-attempt otherwise to copy, modify, sublicense, link with, or
-distribute the Library is void, and will automatically terminate your
-rights under this License. However, parties who have received copies,
-or rights, from you under this License will not have their licenses
-terminated so long as such parties remain in full compliance.
-
- 9. You are not required to accept this License, since you have not
-signed it. However, nothing else grants you permission to modify or
-distribute the Library or its derivative works. These actions are
-prohibited by law if you do not accept this License. Therefore, by
-modifying or distributing the Library (or any work based on the
-Library), you indicate your acceptance of this License to do so, and
-all its terms and conditions for copying, distributing or modifying
-the Library or works based on it.
-
- 10. Each time you redistribute the Library (or any work based on the
-Library), the recipient automatically receives a license from the
-original licensor to copy, distribute, link with or modify the Library
-subject to these terms and conditions. You may not impose any further
-restrictions on the recipients' exercise of the rights granted herein.
-You are not responsible for enforcing compliance by third parties with
-this License.
-
- 11. If, as a consequence of a court judgment or allegation of patent
-infringement or for any other reason (not limited to patent issues),
-conditions are imposed on you (whether by court order, agreement or
-otherwise) that contradict the conditions of this License, they do not
-excuse you from the conditions of this License. If you cannot
-distribute so as to satisfy simultaneously your obligations under this
-License and any other pertinent obligations, then as a consequence you
-may not distribute the Library at all. For example, if a patent
-license would not permit royalty-free redistribution of the Library by
-all those who receive copies directly or indirectly through you, then
-the only way you could satisfy both it and this License would be to
-refrain entirely from distribution of the Library.
-
-If any portion of this section is held invalid or unenforceable under any
-particular circumstance, the balance of the section is intended to apply,
-and the section as a whole is intended to apply in other circumstances.
-
-It is not the purpose of this section to induce you to infringe any
-patents or other property right claims or to contest validity of any
-such claims; this section has the sole purpose of protecting the
-integrity of the free software distribution system which is
-implemented by public license practices. Many people have made
-generous contributions to the wide range of software distributed
-through that system in reliance on consistent application of that
-system; it is up to the author/donor to decide if he or she is willing
-to distribute software through any other system and a licensee cannot
-impose that choice.
-
-This section is intended to make thoroughly clear what is believed to
-be a consequence of the rest of this License.
-
- 12. If the distribution and/or use of the Library is restricted in
-certain countries either by patents or by copyrighted interfaces, the
-original copyright holder who places the Library under this License may add
-an explicit geographical distribution limitation excluding those countries,
-so that distribution is permitted only in or among countries not thus
-excluded. In such case, this License incorporates the limitation as if
-written in the body of this License.
-
- 13. The Free Software Foundation may publish revised and/or new
-versions of the Lesser General Public License from time to time.
-Such new versions will be similar in spirit to the present version,
-but may differ in detail to address new problems or concerns.
-
-Each version is given a distinguishing version number. If the Library
-specifies a version number of this License which applies to it and
-"any later version", you have the option of following the terms and
-conditions either of that version or of any later version published by
-the Free Software Foundation. If the Library does not specify a
-license version number, you may choose any version ever published by
-the Free Software Foundation.
-
- 14. If you wish to incorporate parts of the Library into other free
-programs whose distribution conditions are incompatible with these,
-write to the author to ask for permission. For software which is
-copyrighted by the Free Software Foundation, write to the Free
-Software Foundation; we sometimes make exceptions for this. Our
-decision will be guided by the two goals of preserving the free status
-of all derivatives of our free software and of promoting the sharing
-and reuse of software generally.
-
- NO WARRANTY
-
- 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO
-WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW.
-EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR
-OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY
-KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
-PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE
-LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME
-THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
-
- 16. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
-WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
-AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
-FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
-CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
-LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
-RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
-FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
-SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
-DAMAGES.
-
- END OF TERMS AND CONDITIONS
-
- How to Apply These Terms to Your New Libraries
-
- If you develop a new library, and you want it to be of the greatest
-possible use to the public, we recommend making it free software that
-everyone can redistribute and change. You can do so by permitting
-redistribution under these terms (or, alternatively, under the terms of the
-ordinary General Public License).
-
- To apply these terms, attach the following notices to the library. It is
-safest to attach them to the start of each source file to most effectively
-convey the exclusion of warranty; and each file should have at least the
-"copyright" line and a pointer to where the full notice is found.
-
- <one line to give the library's name and a brief idea of what it does.>
- Copyright (C) <year> <name of author>
-
- This library is free software; you can redistribute it and/or
- modify it under the terms of the GNU Lesser General Public
- License as published by the Free Software Foundation; either
- version 2.1 of the License, or (at your option) any later version.
-
- This library is distributed in the hope that it will be useful,
- but WITHOUT ANY WARRANTY; without even the implied warranty of
- MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
- Lesser General Public License for more details.
-
- You should have received a copy of the GNU Lesser General Public
- License along with this library; if not, write to the Free Software
- Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
-
-Also add information on how to contact you by electronic and paper mail.
-
-You should also get your employer (if you work as a programmer) or your
-school, if any, to sign a "copyright disclaimer" for the library, if
-necessary. Here is a sample; alter the names:
-
- Yoyodyne, Inc., hereby disclaims all copyright interest in the
- library `Frob' (a library for tweaking knobs) written by James Random Hacker.
-
- <signature of Ty Coon>, 1 April 1990
- Ty Coon, President of Vice
-
-That's all there is to it!
diff --git a/lib/spack/external/nose/__init__.py b/lib/spack/external/nose/__init__.py
deleted file mode 100644
index 1ae1362b7a..0000000000
--- a/lib/spack/external/nose/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-from nose.core import collector, main, run, run_exit, runmodule
-# backwards compatibility
-from nose.exc import SkipTest, DeprecatedTest
-from nose.tools import with_setup
-
-__author__ = 'Jason Pellerin'
-__versioninfo__ = (1, 3, 7)
-__version__ = '.'.join(map(str, __versioninfo__))
-
-__all__ = [
- 'main', 'run', 'run_exit', 'runmodule', 'with_setup',
- 'SkipTest', 'DeprecatedTest', 'collector'
- ]
-
-
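The removed `nose/__init__.py` did little more than re-export the package entry points (`main`, `run`, `run_exit`, `runmodule`, `collector`). A minimal sketch of programmatic use, assuming the vendored nose 1.3.x were still importable; the `tests/` path is hypothetical::

    # Sketch only: nose.run() builds a TestProgram with exit=False and
    # returns True when every collected test passes.
    import nose

    if __name__ == '__main__':
        ok = nose.run(argv=['nosetests', '-v', 'tests/'])
        print('all tests passed' if ok else 'failures or errors detected')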
diff --git a/lib/spack/external/nose/__main__.py b/lib/spack/external/nose/__main__.py
deleted file mode 100644
index b402d9df12..0000000000
--- a/lib/spack/external/nose/__main__.py
+++ /dev/null
@@ -1,8 +0,0 @@
-import sys
-
-from nose.core import run_exit
-
-if sys.argv[0].endswith('__main__.py'):
- sys.argv[0] = '%s -m nose' % sys.executable
-
-run_exit()
diff --git a/lib/spack/external/nose/case.py b/lib/spack/external/nose/case.py
deleted file mode 100644
index cffa4ab4c9..0000000000
--- a/lib/spack/external/nose/case.py
+++ /dev/null
@@ -1,397 +0,0 @@
-"""nose unittest.TestCase subclasses. It is not necessary to subclass these
-classes when writing tests; they are used internally by nose.loader.TestLoader
-to create test cases from test functions and methods in test classes.
-"""
-import logging
-import sys
-import unittest
-from inspect import isfunction
-from nose.config import Config
-from nose.failure import Failure # for backwards compatibility
-from nose.util import resolve_name, test_address, try_run
-
-log = logging.getLogger(__name__)
-
-
-__all__ = ['Test']
-
-
-class Test(unittest.TestCase):
- """The universal test case wrapper.
-
- When a plugin sees a test, it will always see an instance of this
- class. To access the actual test case that will be run, access the
- test property of the nose.case.Test instance.
- """
- __test__ = False # do not collect
- def __init__(self, test, config=None, resultProxy=None):
- # sanity check
- if not callable(test):
- raise TypeError("nose.case.Test called with argument %r that "
- "is not callable. A callable is required."
- % test)
- self.test = test
- if config is None:
- config = Config()
- self.config = config
- self.tbinfo = None
- self.capturedOutput = None
- self.resultProxy = resultProxy
- self.plugins = config.plugins
- self.passed = None
- unittest.TestCase.__init__(self)
-
- def __call__(self, *arg, **kwarg):
- return self.run(*arg, **kwarg)
-
- def __str__(self):
- name = self.plugins.testName(self)
- if name is not None:
- return name
- return str(self.test)
-
- def __repr__(self):
- return "Test(%r)" % self.test
-
- def afterTest(self, result):
- """Called after test is complete (after result.stopTest)
- """
- try:
- afterTest = result.afterTest
- except AttributeError:
- pass
- else:
- afterTest(self.test)
-
- def beforeTest(self, result):
- """Called before test is run (before result.startTest)
- """
- try:
- beforeTest = result.beforeTest
- except AttributeError:
- pass
- else:
- beforeTest(self.test)
-
- def exc_info(self):
- """Extract exception info.
- """
- exc, exv, tb = sys.exc_info()
- return (exc, exv, tb)
-
- def id(self):
- """Get a short(er) description of the test
- """
- return self.test.id()
-
- def address(self):
- """Return a round-trip name for this test, a name that can be
- fed back as input to loadTestByName and (assuming the same
- plugin configuration) result in the loading of this test.
- """
- if hasattr(self.test, 'address'):
- return self.test.address()
- else:
- # not a nose case
- return test_address(self.test)
-
- def _context(self):
- try:
- return self.test.context
- except AttributeError:
- pass
- try:
- return self.test.__class__
- except AttributeError:
- pass
- try:
- return resolve_name(self.test.__module__)
- except AttributeError:
- pass
- return None
- context = property(_context, None, None,
- """Get the context object of this test (if any).""")
-
- def run(self, result):
- """Modified run for the test wrapper.
-
- From here we don't call result.startTest or stopTest or
- addSuccess. The wrapper calls addError/addFailure only if its
- own setup or teardown fails, or running the wrapped test fails
- (eg, if the wrapped "test" is not callable).
-
- Two additional methods are called, beforeTest and
- afterTest. These give plugins a chance to modify the wrapped
- test before it is called and do cleanup after it is
- called. They are called unconditionally.
- """
- if self.resultProxy:
- result = self.resultProxy(result, self)
- try:
- try:
- self.beforeTest(result)
- self.runTest(result)
- except KeyboardInterrupt:
- raise
- except:
- err = sys.exc_info()
- result.addError(self, err)
- finally:
- self.afterTest(result)
-
- def runTest(self, result):
- """Run the test. Plugins may alter the test by returning a
- value from prepareTestCase. The value must be callable and
- must accept one argument, the result instance.
- """
- test = self.test
- plug_test = self.config.plugins.prepareTestCase(self)
- if plug_test is not None:
- test = plug_test
- test(result)
-
- def shortDescription(self):
- desc = self.plugins.describeTest(self)
- if desc is not None:
- return desc
- # work around bug in unittest.TestCase.shortDescription
- # with multiline docstrings.
- test = self.test
- try:
- test._testMethodDoc = test._testMethodDoc.strip()# 2.5
- except AttributeError:
- try:
- # 2.4 and earlier
- test._TestCase__testMethodDoc = \
- test._TestCase__testMethodDoc.strip()
- except AttributeError:
- pass
- # 2.7 compat: shortDescription() always returns something
- # which is a change from 2.6 and below, and breaks the
- # testName plugin call.
- try:
- desc = self.test.shortDescription()
- except Exception:
- # this is probably caused by a problem in test.__str__() and is
- # only triggered by python 3.1's unittest!
- pass
- try:
- if desc == str(self.test):
- return
- except Exception:
- # If str() triggers an exception then ignore it.
- # see issue 422
- pass
- return desc
-
-
-class TestBase(unittest.TestCase):
- """Common functionality for FunctionTestCase and MethodTestCase.
- """
- __test__ = False # do not collect
-
- def id(self):
- return str(self)
-
- def runTest(self):
- self.test(*self.arg)
-
- def shortDescription(self):
- if hasattr(self.test, 'description'):
- return self.test.description
- func, arg = self._descriptors()
- doc = getattr(func, '__doc__', None)
- if not doc:
- doc = str(self)
- return doc.strip().split("\n")[0].strip()
-
-
-class FunctionTestCase(TestBase):
- """TestCase wrapper for test functions.
-
- Don't use this class directly; it is used internally in nose to
- create test cases for test functions.
- """
- __test__ = False # do not collect
-
- def __init__(self, test, setUp=None, tearDown=None, arg=tuple(),
- descriptor=None):
-        """Initialize the FunctionTestCase.
-
- Required argument:
-
- * test -- the test function to call.
-
- Optional arguments:
-
- * setUp -- function to run at setup.
-
- * tearDown -- function to run at teardown.
-
- * arg -- arguments to pass to the test function. This is to support
- generator functions that yield arguments.
-
- * descriptor -- the function, other than the test, that should be used
- to construct the test name. This is to support generator functions.
- """
-
- self.test = test
- self.setUpFunc = setUp
- self.tearDownFunc = tearDown
- self.arg = arg
- self.descriptor = descriptor
- TestBase.__init__(self)
-
- def address(self):
- """Return a round-trip name for this test, a name that can be
- fed back as input to loadTestByName and (assuming the same
- plugin configuration) result in the loading of this test.
- """
- if self.descriptor is not None:
- return test_address(self.descriptor)
- else:
- return test_address(self.test)
-
- def _context(self):
- return resolve_name(self.test.__module__)
- context = property(_context, None, None,
- """Get context (module) of this test""")
-
- def setUp(self):
- """Run any setup function attached to the test function
- """
- if self.setUpFunc:
- self.setUpFunc()
- else:
- names = ('setup', 'setUp', 'setUpFunc')
- try_run(self.test, names)
-
- def tearDown(self):
- """Run any teardown function attached to the test function
- """
- if self.tearDownFunc:
- self.tearDownFunc()
- else:
- names = ('teardown', 'tearDown', 'tearDownFunc')
- try_run(self.test, names)
-
- def __str__(self):
- func, arg = self._descriptors()
- if hasattr(func, 'compat_func_name'):
- name = func.compat_func_name
- else:
- name = func.__name__
- name = "%s.%s" % (func.__module__, name)
- if arg:
- name = "%s%s" % (name, arg)
- # FIXME need to include the full dir path to disambiguate
- # in cases where test module of the same name was seen in
- # another directory (old fromDirectory)
- return name
- __repr__ = __str__
-
- def _descriptors(self):
- """Get the descriptors of the test function: the function and
- arguments that will be used to construct the test name. In
- most cases, this is the function itself and no arguments. For
- tests generated by generator functions, the original
- (generator) function and args passed to the generated function
- are returned.
- """
- if self.descriptor:
- return self.descriptor, self.arg
- else:
- return self.test, self.arg
-
-
-class MethodTestCase(TestBase):
- """Test case wrapper for test methods.
-
- Don't use this class directly; it is used internally in nose to
- create test cases for test methods.
- """
- __test__ = False # do not collect
-
- def __init__(self, method, test=None, arg=tuple(), descriptor=None):
- """Initialize the MethodTestCase.
-
- Required argument:
-
- * method -- the method to call, may be bound or unbound. In either
- case, a new instance of the method's class will be instantiated to
- make the call. Note: In Python 3.x, if using an unbound method, you
- must wrap it using pyversion.unbound_method.
-
- Optional arguments:
-
- * test -- the test function to call. If this is passed, it will be
- called instead of getting a new bound method of the same name as the
- desired method from the test instance. This is to support generator
- methods that yield inline functions.
-
- * arg -- arguments to pass to the test function. This is to support
- generator methods that yield arguments.
-
- * descriptor -- the function, other than the test, that should be used
- to construct the test name. This is to support generator methods.
- """
- self.method = method
- self.test = test
- self.arg = arg
- self.descriptor = descriptor
- if isfunction(method):
- raise ValueError("Unbound methods must be wrapped using pyversion.unbound_method before passing to MethodTestCase")
- self.cls = method.im_class
- self.inst = self.cls()
- if self.test is None:
- method_name = self.method.__name__
- self.test = getattr(self.inst, method_name)
- TestBase.__init__(self)
-
- def __str__(self):
- func, arg = self._descriptors()
- if hasattr(func, 'compat_func_name'):
- name = func.compat_func_name
- else:
- name = func.__name__
- name = "%s.%s.%s" % (self.cls.__module__,
- self.cls.__name__,
- name)
- if arg:
- name = "%s%s" % (name, arg)
- return name
- __repr__ = __str__
-
- def address(self):
- """Return a round-trip name for this test, a name that can be
- fed back as input to loadTestByName and (assuming the same
- plugin configuration) result in the loading of this test.
- """
- if self.descriptor is not None:
- return test_address(self.descriptor)
- else:
- return test_address(self.method)
-
- def _context(self):
- return self.cls
- context = property(_context, None, None,
- """Get context (class) of this test""")
-
- def setUp(self):
- try_run(self.inst, ('setup', 'setUp'))
-
- def tearDown(self):
- try_run(self.inst, ('teardown', 'tearDown'))
-
- def _descriptors(self):
- """Get the descriptors of the test method: the method and
- arguments that will be used to construct the test name. In
- most cases, this is the method itself and no arguments. For
- tests generated by generator methods, the original
- (generator) method and args passed to the generated method
- or function are returned.
- """
- if self.descriptor:
- return self.descriptor, self.arg
- else:
- return self.method, self.arg
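The removed `nose.case.FunctionTestCase` (together with `TestBase`) wraps a plain test function in a `unittest.TestCase`: `runTest` calls `self.test(*self.arg)`, and `setUp`/`tearDown` run any callables passed in (or found attached to the function). A minimal sketch of driving such a wrapper, assuming the deleted module were importable under Python 2; the function names are hypothetical::

    import unittest
    from nose.case import FunctionTestCase

    def make_fixture():
        print('setting up')

    def check_addition():
        assert 1 + 1 == 2

    # Wrap the bare function; setUp runs first, then runTest() calls it.
    case = FunctionTestCase(check_addition, setUp=make_fixture)
    unittest.TextTestRunner(verbosity=2).run(case)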
diff --git a/lib/spack/external/nose/commands.py b/lib/spack/external/nose/commands.py
deleted file mode 100644
index ef0e9caed4..0000000000
--- a/lib/spack/external/nose/commands.py
+++ /dev/null
@@ -1,172 +0,0 @@
-"""
-nosetests setuptools command
-----------------------------
-
-The easiest way to run tests with nose is to use the `nosetests` setuptools
-command::
-
- python setup.py nosetests
-
-This command has one *major* benefit over the standard `test` command: *all
-nose plugins are supported*.
-
-To configure the `nosetests` command, add a [nosetests] section to your
-setup.cfg. The [nosetests] section can contain any command line arguments that
-nosetests supports. The differences between issuing an option on the command
-line and adding it to setup.cfg are:
-
-* In setup.cfg, the -- prefix must be excluded
-* In setup.cfg, command line flags that take no arguments must be given an
- argument flag (1, T or TRUE for active, 0, F or FALSE for inactive)
-
-Here's an example [nosetests] setup.cfg section::
-
- [nosetests]
- verbosity=1
- detailed-errors=1
- with-coverage=1
- cover-package=nose
- debug=nose.loader
- pdb=1
- pdb-failures=1
-
-If you commonly run nosetests with a large number of options, using
-the nosetests setuptools command and configuring with setup.cfg can
-make running your tests much less tedious. (Note that the same options
-and format supported in setup.cfg are supported in all other config
-files, and the nosetests script will also load config files.)
-
-Another reason to run tests with the command is that the command will
-install packages listed in your `tests_require`, as well as doing a
-complete build of your package before running tests. For packages with
-dependencies or that build C extensions, using the setuptools command
-can be more convenient than building by hand and running the nosetests
-script.
-
-Bootstrapping
--------------
-
-If you are distributing your project and want users to be able to run tests
-without having to install nose themselves, add nose to the setup_requires
-section of your setup()::
-
- setup(
- # ...
- setup_requires=['nose>=1.0']
- )
-
-This will direct setuptools to download and activate nose during the setup
-process, making the ``nosetests`` command available.
-
-"""
-try:
- from setuptools import Command
-except ImportError:
- Command = nosetests = None
-else:
- from nose.config import Config, option_blacklist, user_config_files, \
- flag, _bool
- from nose.core import TestProgram
- from nose.plugins import DefaultPluginManager
-
-
- def get_user_options(parser):
-        """convert an optparse option list into a distutils option tuple list"""
- opt_list = []
- for opt in parser.option_list:
- if opt._long_opts[0][2:] in option_blacklist:
- continue
- long_name = opt._long_opts[0][2:]
- if opt.action not in ('store_true', 'store_false'):
- long_name = long_name + "="
- short_name = None
- if opt._short_opts:
- short_name = opt._short_opts[0][1:]
- opt_list.append((long_name, short_name, opt.help or ""))
- return opt_list
-
-
- class nosetests(Command):
- description = "Run unit tests using nosetests"
- __config = Config(files=user_config_files(),
- plugins=DefaultPluginManager())
- __parser = __config.getParser()
- user_options = get_user_options(__parser)
-
- def initialize_options(self):
- """create the member variables, but change hyphens to
- underscores
- """
-
- self.option_to_cmds = {}
- for opt in self.__parser.option_list:
- cmd_name = opt._long_opts[0][2:]
- option_name = cmd_name.replace('-', '_')
- self.option_to_cmds[option_name] = cmd_name
- setattr(self, option_name, None)
- self.attr = None
-
- def finalize_options(self):
- """nothing to do here"""
- pass
-
- def run(self):
- """ensure tests are capable of being run, then
- run nose.main with a reconstructed argument list"""
- if getattr(self.distribution, 'use_2to3', False):
- # If we run 2to3 we can not do this inplace:
-
- # Ensure metadata is up-to-date
- build_py = self.get_finalized_command('build_py')
- build_py.inplace = 0
- build_py.run()
- bpy_cmd = self.get_finalized_command("build_py")
- build_path = bpy_cmd.build_lib
-
- # Build extensions
- egg_info = self.get_finalized_command('egg_info')
- egg_info.egg_base = build_path
- egg_info.run()
-
- build_ext = self.get_finalized_command('build_ext')
- build_ext.inplace = 0
- build_ext.run()
- else:
- self.run_command('egg_info')
-
- # Build extensions in-place
- build_ext = self.get_finalized_command('build_ext')
- build_ext.inplace = 1
- build_ext.run()
-
- if self.distribution.install_requires:
- self.distribution.fetch_build_eggs(
- self.distribution.install_requires)
- if self.distribution.tests_require:
- self.distribution.fetch_build_eggs(
- self.distribution.tests_require)
-
- ei_cmd = self.get_finalized_command("egg_info")
- argv = ['nosetests', '--where', ei_cmd.egg_base]
- for (option_name, cmd_name) in self.option_to_cmds.items():
- if option_name in option_blacklist:
- continue
- value = getattr(self, option_name)
- if value is not None:
- argv.extend(
- self.cfgToArg(option_name.replace('_', '-'), value))
- TestProgram(argv=argv, config=self.__config)
-
- def cfgToArg(self, optname, value):
- argv = []
- long_optname = '--' + optname
- opt = self.__parser.get_option(long_optname)
- if opt.action in ('store_true', 'store_false'):
- if not flag(value):
- raise ValueError("Invalid value '%s' for '%s'" % (
- value, optname))
- if _bool(value):
- argv.append(long_optname)
- else:
- argv.extend([long_optname, value])
- return argv
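The module docstring above covers the `nosetests` setuptools command and its `[nosetests]` section in setup.cfg. Pulling the pieces into one place, a minimal project sketch might look like this; the package name `mypkg` is hypothetical::

    # setup.py
    from setuptools import setup

    setup(
        name='mypkg',
        packages=['mypkg'],
        setup_requires=['nose>=1.0'],  # makes `python setup.py nosetests` available
    )

    # setup.cfg
    [nosetests]
    verbosity=2
    with-coverage=1
    cover-package=mypkg

Running `python setup.py nosetests` then builds the package, pulls in anything listed in `tests_require`, and runs the suite with nose plugins available.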
diff --git a/lib/spack/external/nose/config.py b/lib/spack/external/nose/config.py
deleted file mode 100644
index 125eb5579d..0000000000
--- a/lib/spack/external/nose/config.py
+++ /dev/null
@@ -1,661 +0,0 @@
-import logging
-import optparse
-import os
-import re
-import sys
-import ConfigParser
-from optparse import OptionParser
-from nose.util import absdir, tolist
-from nose.plugins.manager import NoPlugins
-from warnings import warn, filterwarnings
-
-log = logging.getLogger(__name__)
-
-# not allowed in config files
-option_blacklist = ['help', 'verbose']
-
-config_files = [
- # Linux users will prefer this
- "~/.noserc",
- # Windows users will prefer this
- "~/nose.cfg"
- ]
-
-# platforms on which the exe check defaults to off
-# Windows and IronPython
-exe_allowed_platforms = ('win32', 'cli')
-
-filterwarnings("always", category=DeprecationWarning,
- module=r'(.*\.)?nose\.config')
-
-class NoSuchOptionError(Exception):
- def __init__(self, name):
- Exception.__init__(self, name)
- self.name = name
-
-
-class ConfigError(Exception):
- pass
-
-
-class ConfiguredDefaultsOptionParser(object):
- """
- Handler for options from commandline and config files.
- """
- def __init__(self, parser, config_section, error=None, file_error=None):
- self._parser = parser
- self._config_section = config_section
- if error is None:
- error = self._parser.error
- self._error = error
- if file_error is None:
- file_error = lambda msg, **kw: error(msg)
- self._file_error = file_error
-
- def _configTuples(self, cfg, filename):
- config = []
- if self._config_section in cfg.sections():
- for name, value in cfg.items(self._config_section):
- config.append((name, value, filename))
- return config
-
- def _readFromFilenames(self, filenames):
- config = []
- for filename in filenames:
- cfg = ConfigParser.RawConfigParser()
- try:
- cfg.read(filename)
- except ConfigParser.Error, exc:
- raise ConfigError("Error reading config file %r: %s" %
- (filename, str(exc)))
- config.extend(self._configTuples(cfg, filename))
- return config
-
- def _readFromFileObject(self, fh):
- cfg = ConfigParser.RawConfigParser()
- try:
- filename = fh.name
- except AttributeError:
- filename = '<???>'
- try:
- cfg.readfp(fh)
- except ConfigParser.Error, exc:
- raise ConfigError("Error reading config file %r: %s" %
- (filename, str(exc)))
- return self._configTuples(cfg, filename)
-
- def _readConfiguration(self, config_files):
- try:
- config_files.readline
- except AttributeError:
- filename_or_filenames = config_files
- if isinstance(filename_or_filenames, basestring):
- filenames = [filename_or_filenames]
- else:
- filenames = filename_or_filenames
- config = self._readFromFilenames(filenames)
- else:
- fh = config_files
- config = self._readFromFileObject(fh)
- return config
-
- def _processConfigValue(self, name, value, values, parser):
- opt_str = '--' + name
- option = parser.get_option(opt_str)
- if option is None:
- raise NoSuchOptionError(name)
- else:
- option.process(opt_str, value, values, parser)
-
- def _applyConfigurationToValues(self, parser, config, values):
- for name, value, filename in config:
- if name in option_blacklist:
- continue
- try:
- self._processConfigValue(name, value, values, parser)
- except NoSuchOptionError, exc:
- self._file_error(
- "Error reading config file %r: "
- "no such option %r" % (filename, exc.name),
- name=name, filename=filename)
- except optparse.OptionValueError, exc:
- msg = str(exc).replace('--' + name, repr(name), 1)
- self._file_error("Error reading config file %r: "
- "%s" % (filename, msg),
- name=name, filename=filename)
-
- def parseArgsAndConfigFiles(self, args, config_files):
- values = self._parser.get_default_values()
- try:
- config = self._readConfiguration(config_files)
- except ConfigError, exc:
- self._error(str(exc))
- else:
- try:
- self._applyConfigurationToValues(self._parser, config, values)
- except ConfigError, exc:
- self._error(str(exc))
- return self._parser.parse_args(args, values)
-
-
-class Config(object):
- """nose configuration.
-
- Instances of Config are used throughout nose to configure
- behavior, including plugin lists. Here are the default values for
- all config keys::
-
- self.env = env = kw.pop('env', {})
- self.args = ()
- self.testMatch = re.compile(r'(?:^|[\\b_\\.%s-])[Tt]est' % os.sep)
- self.addPaths = not env.get('NOSE_NOPATH', False)
- self.configSection = 'nosetests'
- self.debug = env.get('NOSE_DEBUG')
- self.debugLog = env.get('NOSE_DEBUG_LOG')
- self.exclude = None
- self.getTestCaseNamesCompat = False
- self.includeExe = env.get('NOSE_INCLUDE_EXE',
- sys.platform in exe_allowed_platforms)
- self.ignoreFiles = (re.compile(r'^\.'),
- re.compile(r'^_'),
- re.compile(r'^setup\.py$')
- )
- self.include = None
- self.loggingConfig = None
- self.logStream = sys.stderr
- self.options = NoOptions()
- self.parser = None
- self.plugins = NoPlugins()
- self.srcDirs = ('lib', 'src')
- self.runOnInit = True
- self.stopOnError = env.get('NOSE_STOP', False)
- self.stream = sys.stderr
- self.testNames = ()
- self.verbosity = int(env.get('NOSE_VERBOSE', 1))
- self.where = ()
- self.py3where = ()
- self.workingDir = None
- """
-
- def __init__(self, **kw):
- self.env = env = kw.pop('env', {})
- self.args = ()
- self.testMatchPat = env.get('NOSE_TESTMATCH',
- r'(?:^|[\b_\.%s-])[Tt]est' % os.sep)
- self.testMatch = re.compile(self.testMatchPat)
- self.addPaths = not env.get('NOSE_NOPATH', False)
- self.configSection = 'nosetests'
- self.debug = env.get('NOSE_DEBUG')
- self.debugLog = env.get('NOSE_DEBUG_LOG')
- self.exclude = None
- self.getTestCaseNamesCompat = False
- self.includeExe = env.get('NOSE_INCLUDE_EXE',
- sys.platform in exe_allowed_platforms)
- self.ignoreFilesDefaultStrings = [r'^\.',
- r'^_',
- r'^setup\.py$',
- ]
- self.ignoreFiles = map(re.compile, self.ignoreFilesDefaultStrings)
- self.include = None
- self.loggingConfig = None
- self.logStream = sys.stderr
- self.options = NoOptions()
- self.parser = None
- self.plugins = NoPlugins()
- self.srcDirs = ('lib', 'src')
- self.runOnInit = True
- self.stopOnError = env.get('NOSE_STOP', False)
- self.stream = sys.stderr
- self.testNames = []
- self.verbosity = int(env.get('NOSE_VERBOSE', 1))
- self.where = ()
- self.py3where = ()
- self.workingDir = os.getcwd()
- self.traverseNamespace = False
- self.firstPackageWins = False
- self.parserClass = OptionParser
- self.worker = False
-
- self._default = self.__dict__.copy()
- self.update(kw)
- self._orig = self.__dict__.copy()
-
- def __getstate__(self):
- state = self.__dict__.copy()
- del state['stream']
- del state['_orig']
- del state['_default']
- del state['env']
- del state['logStream']
- # FIXME remove plugins, have only plugin manager class
- state['plugins'] = self.plugins.__class__
- return state
-
- def __setstate__(self, state):
- plugincls = state.pop('plugins')
- self.update(state)
- self.worker = True
- # FIXME won't work for static plugin lists
- self.plugins = plugincls()
- self.plugins.loadPlugins()
- # needed so .can_configure gets set appropriately
- dummy_parser = self.parserClass()
- self.plugins.addOptions(dummy_parser, {})
- self.plugins.configure(self.options, self)
-
- def __repr__(self):
- d = self.__dict__.copy()
- # don't expose env, could include sensitive info
- d['env'] = {}
- keys = [ k for k in d.keys()
- if not k.startswith('_') ]
- keys.sort()
- return "Config(%s)" % ', '.join([ '%s=%r' % (k, d[k])
- for k in keys ])
- __str__ = __repr__
-
- def _parseArgs(self, argv, cfg_files):
- def warn_sometimes(msg, name=None, filename=None):
- if (hasattr(self.plugins, 'excludedOption') and
- self.plugins.excludedOption(name)):
- msg = ("Option %r in config file %r ignored: "
- "excluded by runtime environment" %
- (name, filename))
- warn(msg, RuntimeWarning)
- else:
- raise ConfigError(msg)
- parser = ConfiguredDefaultsOptionParser(
- self.getParser(), self.configSection, file_error=warn_sometimes)
- return parser.parseArgsAndConfigFiles(argv[1:], cfg_files)
-
- def configure(self, argv=None, doc=None):
- """Configure the nose running environment. Execute configure before
- collecting tests with nose.TestCollector to enable output capture and
- other features.
- """
- env = self.env
- if argv is None:
- argv = sys.argv
-
- cfg_files = getattr(self, 'files', [])
- options, args = self._parseArgs(argv, cfg_files)
- # If -c --config has been specified on command line,
- # load those config files and reparse
- if getattr(options, 'files', []):
- options, args = self._parseArgs(argv, options.files)
-
- self.options = options
- if args:
- self.testNames = args
- if options.testNames is not None:
- self.testNames.extend(tolist(options.testNames))
-
- if options.py3where is not None:
- if sys.version_info >= (3,):
- options.where = options.py3where
-
- # `where` is an append action, so it can't have a default value
- # in the parser, or that default will always be in the list
- if not options.where:
- options.where = env.get('NOSE_WHERE', None)
-
- # include and exclude also
- if not options.ignoreFiles:
- options.ignoreFiles = env.get('NOSE_IGNORE_FILES', [])
- if not options.include:
- options.include = env.get('NOSE_INCLUDE', [])
- if not options.exclude:
- options.exclude = env.get('NOSE_EXCLUDE', [])
-
- self.addPaths = options.addPaths
- self.stopOnError = options.stopOnError
- self.verbosity = options.verbosity
- self.includeExe = options.includeExe
- self.traverseNamespace = options.traverseNamespace
- self.debug = options.debug
- self.debugLog = options.debugLog
- self.loggingConfig = options.loggingConfig
- self.firstPackageWins = options.firstPackageWins
- self.configureLogging()
-
- if not options.byteCompile:
- sys.dont_write_bytecode = True
-
- if options.where is not None:
- self.configureWhere(options.where)
-
- if options.testMatch:
- self.testMatch = re.compile(options.testMatch)
-
- if options.ignoreFiles:
- self.ignoreFiles = map(re.compile, tolist(options.ignoreFiles))
- log.info("Ignoring files matching %s", options.ignoreFiles)
- else:
- log.info("Ignoring files matching %s", self.ignoreFilesDefaultStrings)
-
- if options.include:
- self.include = map(re.compile, tolist(options.include))
- log.info("Including tests matching %s", options.include)
-
- if options.exclude:
- self.exclude = map(re.compile, tolist(options.exclude))
- log.info("Excluding tests matching %s", options.exclude)
-
- # When listing plugins we don't want to run them
- if not options.showPlugins:
- self.plugins.configure(options, self)
- self.plugins.begin()
-
- def configureLogging(self):
- """Configure logging for nose, or optionally other packages. Any logger
- name may be set with the debug option, and that logger will be set to
- debug level and be assigned the same handler as the nose loggers, unless
- it already has a handler.
- """
- if self.loggingConfig:
- from logging.config import fileConfig
- fileConfig(self.loggingConfig)
- return
-
- format = logging.Formatter('%(name)s: %(levelname)s: %(message)s')
- if self.debugLog:
- handler = logging.FileHandler(self.debugLog)
- else:
- handler = logging.StreamHandler(self.logStream)
- handler.setFormatter(format)
-
- logger = logging.getLogger('nose')
- logger.propagate = 0
-
- # only add our default handler if there isn't already one there
- # this avoids annoying duplicate log messages.
- found = False
- if self.debugLog:
- debugLogAbsPath = os.path.abspath(self.debugLog)
- for h in logger.handlers:
- if type(h) == logging.FileHandler and \
- h.baseFilename == debugLogAbsPath:
- found = True
- else:
- for h in logger.handlers:
- if type(h) == logging.StreamHandler and \
- h.stream == self.logStream:
- found = True
- if not found:
- logger.addHandler(handler)
-
- # default level
- lvl = logging.WARNING
- if self.verbosity >= 5:
- lvl = 0
- elif self.verbosity >= 4:
- lvl = logging.DEBUG
- elif self.verbosity >= 3:
- lvl = logging.INFO
- logger.setLevel(lvl)
-
- # individual overrides
- if self.debug:
- # no blanks
- debug_loggers = [ name for name in self.debug.split(',')
- if name ]
- for logger_name in debug_loggers:
- l = logging.getLogger(logger_name)
- l.setLevel(logging.DEBUG)
- if not l.handlers and not logger_name.startswith('nose'):
- l.addHandler(handler)
-
- def configureWhere(self, where):
- """Configure the working directory or directories for the test run.
- """
- from nose.importer import add_path
- self.workingDir = None
- where = tolist(where)
- warned = False
- for path in where:
- if not self.workingDir:
- abs_path = absdir(path)
- if abs_path is None:
- raise ValueError("Working directory '%s' not found, or "
- "not a directory" % path)
- log.info("Set working dir to %s", abs_path)
- self.workingDir = abs_path
- if self.addPaths and \
- os.path.exists(os.path.join(abs_path, '__init__.py')):
- log.info("Working directory %s is a package; "
- "adding to sys.path" % abs_path)
- add_path(abs_path)
- continue
- if not warned:
- warn("Use of multiple -w arguments is deprecated and "
- "support may be removed in a future release. You can "
- "get the same behavior by passing directories without "
- "the -w argument on the command line, or by using the "
- "--tests argument in a configuration file.",
- DeprecationWarning)
- warned = True
- self.testNames.append(path)
-
- def default(self):
- """Reset all config values to defaults.
- """
- self.__dict__.update(self._default)
-
- def getParser(self, doc=None):
- """Get the command line option parser.
- """
- if self.parser:
- return self.parser
- env = self.env
- parser = self.parserClass(doc)
- parser.add_option(
- "-V","--version", action="store_true",
- dest="version", default=False,
- help="Output nose version and exit")
- parser.add_option(
- "-p", "--plugins", action="store_true",
- dest="showPlugins", default=False,
- help="Output list of available plugins and exit. Combine with "
- "higher verbosity for greater detail")
- parser.add_option(
- "-v", "--verbose",
- action="count", dest="verbosity",
- default=self.verbosity,
- help="Be more verbose. [NOSE_VERBOSE]")
- parser.add_option(
- "--verbosity", action="store", dest="verbosity",
- metavar='VERBOSITY',
- type="int", help="Set verbosity; --verbosity=2 is "
- "the same as -v")
- parser.add_option(
- "-q", "--quiet", action="store_const", const=0, dest="verbosity",
- help="Be less verbose")
- parser.add_option(
- "-c", "--config", action="append", dest="files",
- metavar="FILES",
- help="Load configuration from config file(s). May be specified "
- "multiple times; in that case, all config files will be "
- "loaded and combined")
- parser.add_option(
- "-w", "--where", action="append", dest="where",
- metavar="WHERE",
- help="Look for tests in this directory. "
- "May be specified multiple times. The first directory passed "
- "will be used as the working directory, in place of the current "
- "working directory, which is the default. Others will be added "
- "to the list of tests to execute. [NOSE_WHERE]"
- )
- parser.add_option(
- "--py3where", action="append", dest="py3where",
- metavar="PY3WHERE",
- help="Look for tests in this directory under Python 3.x. "
- "Functions the same as 'where', but only applies if running under "
- "Python 3.x or above. Note that, if present under 3.x, this "
- "option completely replaces any directories specified with "
- "'where', so the 'where' option becomes ineffective. "
- "[NOSE_PY3WHERE]"
- )
- parser.add_option(
- "-m", "--match", "--testmatch", action="store",
- dest="testMatch", metavar="REGEX",
- help="Files, directories, function names, and class names "
- "that match this regular expression are considered tests. "
- "Default: %s [NOSE_TESTMATCH]" % self.testMatchPat,
- default=self.testMatchPat)
- parser.add_option(
- "--tests", action="store", dest="testNames", default=None,
- metavar='NAMES',
- help="Run these tests (comma-separated list). This argument is "
- "useful mainly from configuration files; on the command line, "
- "just pass the tests to run as additional arguments with no "
- "switch.")
- parser.add_option(
- "-l", "--debug", action="store",
- dest="debug", default=self.debug,
- help="Activate debug logging for one or more systems. "
- "Available debug loggers: nose, nose.importer, "
- "nose.inspector, nose.plugins, nose.result and "
- "nose.selector. Separate multiple names with a comma.")
- parser.add_option(
- "--debug-log", dest="debugLog", action="store",
- default=self.debugLog, metavar="FILE",
- help="Log debug messages to this file "
- "(default: sys.stderr)")
- parser.add_option(
- "--logging-config", "--log-config",
- dest="loggingConfig", action="store",
- default=self.loggingConfig, metavar="FILE",
- help="Load logging config from this file -- bypasses all other"
- " logging config settings.")
- parser.add_option(
- "-I", "--ignore-files", action="append", dest="ignoreFiles",
- metavar="REGEX",
- help="Completely ignore any file that matches this regular "
- "expression. Takes precedence over any other settings or "
- "plugins. "
- "Specifying this option will replace the default setting. "
- "Specify this option multiple times "
- "to add more regular expressions [NOSE_IGNORE_FILES]")
- parser.add_option(
- "-e", "--exclude", action="append", dest="exclude",
- metavar="REGEX",
- help="Don't run tests that match regular "
- "expression [NOSE_EXCLUDE]")
- parser.add_option(
- "-i", "--include", action="append", dest="include",
- metavar="REGEX",
- help="This regular expression will be applied to files, "
- "directories, function names, and class names for a chance "
- "to include additional tests that do not match TESTMATCH. "
- "Specify this option multiple times "
- "to add more regular expressions [NOSE_INCLUDE]")
- parser.add_option(
- "-x", "--stop", action="store_true", dest="stopOnError",
- default=self.stopOnError,
- help="Stop running tests after the first error or failure")
- parser.add_option(
- "-P", "--no-path-adjustment", action="store_false",
- dest="addPaths",
- default=self.addPaths,
- help="Don't make any changes to sys.path when "
- "loading tests [NOSE_NOPATH]")
- parser.add_option(
- "--exe", action="store_true", dest="includeExe",
- default=self.includeExe,
- help="Look for tests in python modules that are "
- "executable. Normal behavior is to exclude executable "
- "modules, since they may not be import-safe "
- "[NOSE_INCLUDE_EXE]")
- parser.add_option(
- "--noexe", action="store_false", dest="includeExe",
- help="DO NOT look for tests in python modules that are "
- "executable. (The default on the windows platform is to "
- "do so.)")
- parser.add_option(
- "--traverse-namespace", action="store_true",
- default=self.traverseNamespace, dest="traverseNamespace",
- help="Traverse through all path entries of a namespace package")
- parser.add_option(
- "--first-package-wins", "--first-pkg-wins", "--1st-pkg-wins",
- action="store_true", default=False, dest="firstPackageWins",
- help="nose's importer will normally evict a package from sys."
- "modules if it sees a package with the same name in a different "
- "location. Set this option to disable that behavior.")
- parser.add_option(
- "--no-byte-compile",
- action="store_false", default=True, dest="byteCompile",
- help="Prevent nose from byte-compiling the source into .pyc files "
- "while nose is scanning for and running tests.")
-
- self.plugins.loadPlugins()
- self.pluginOpts(parser)
-
- self.parser = parser
- return parser
-
- def help(self, doc=None):
- """Return the generated help message
- """
- return self.getParser(doc).format_help()
-
- def pluginOpts(self, parser):
- self.plugins.addOptions(parser, self.env)
-
- def reset(self):
- self.__dict__.update(self._orig)
-
- def todict(self):
- return self.__dict__.copy()
-
- def update(self, d):
- self.__dict__.update(d)
-
-
-class NoOptions(object):
- """Options container that returns None for all options.
- """
- def __getstate__(self):
- return {}
-
- def __setstate__(self, state):
- pass
-
- def __getnewargs__(self):
- return ()
-
- def __nonzero__(self):
- return False
-
-
-def user_config_files():
- """Return path to any existing user config files
- """
- return filter(os.path.exists,
- map(os.path.expanduser, config_files))
-
-
-def all_config_files():
- """Return path to any existing user config files, plus any setup.cfg
- in the current working directory.
- """
- user = user_config_files()
- if os.path.exists('setup.cfg'):
- return user + ['setup.cfg']
- return user
-
-
-# used when parsing config files
-def flag(val):
- """Does the value look like an on/off flag?"""
- if val == 1:
- return True
- elif val == 0:
- return False
- val = str(val)
- if len(val) > 5:
- return False
- return val.upper() in ('1', '0', 'F', 'T', 'TRUE', 'FALSE', 'ON', 'OFF')
-
-
-def _bool(val):
- return str(val).upper() in ('1', 'T', 'TRUE', 'ON')
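The `flag()` and `_bool()` helpers closing the removed `nose/config.py` define how boolean-looking values from config files are read: `flag()` asks whether a value is an on/off switch at all, `_bool()` whether it is switched on. A small sketch of that behaviour, assuming the removed module were importable under Python 2::

    from nose.config import flag, _bool

    assert flag('1') and _bool('1')              # recognised flag, on
    assert flag('FALSE') and not _bool('FALSE')  # recognised flag, off
    assert not flag('nose.loader')               # longer than 5 chars: not a flag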
diff --git a/lib/spack/external/nose/core.py b/lib/spack/external/nose/core.py
deleted file mode 100644
index 49e7939b98..0000000000
--- a/lib/spack/external/nose/core.py
+++ /dev/null
@@ -1,341 +0,0 @@
-"""Implements nose test program and collector.
-"""
-from __future__ import generators
-
-import logging
-import os
-import sys
-import time
-import unittest
-
-from nose.config import Config, all_config_files
-from nose.loader import defaultTestLoader
-from nose.plugins.manager import PluginManager, DefaultPluginManager, \
- RestrictedPluginManager
-from nose.result import TextTestResult
-from nose.suite import FinalizingSuiteWrapper
-from nose.util import isclass, tolist
-
-
-log = logging.getLogger('nose.core')
-compat_24 = sys.version_info >= (2, 4)
-
-__all__ = ['TestProgram', 'main', 'run', 'run_exit', 'runmodule', 'collector',
- 'TextTestRunner']
-
-
-class TextTestRunner(unittest.TextTestRunner):
- """Test runner that uses nose's TextTestResult to enable errorClasses,
- as well as providing hooks for plugins to override or replace the test
- output stream, results, and the test case itself.
- """
- def __init__(self, stream=sys.stderr, descriptions=1, verbosity=1,
- config=None):
- if config is None:
- config = Config()
- self.config = config
- unittest.TextTestRunner.__init__(self, stream, descriptions, verbosity)
-
-
- def _makeResult(self):
- return TextTestResult(self.stream,
- self.descriptions,
- self.verbosity,
- self.config)
-
- def run(self, test):
- """Overrides to provide plugin hooks and defer all output to
- the test result class.
- """
- wrapper = self.config.plugins.prepareTest(test)
- if wrapper is not None:
- test = wrapper
-
- # plugins can decorate or capture the output stream
- wrapped = self.config.plugins.setOutputStream(self.stream)
- if wrapped is not None:
- self.stream = wrapped
-
- result = self._makeResult()
- start = time.time()
- try:
- test(result)
- except KeyboardInterrupt:
- pass
- stop = time.time()
- result.printErrors()
- result.printSummary(start, stop)
- self.config.plugins.finalize(result)
- return result
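
For orientation, a sketch of driving this runner directly with a plain unittest suite; it assumes a working Python 2-era nose installation, and SmokeTest is a made-up case. With no config argument, the runner builds a default nose Config() itself.

import unittest
from nose.core import TextTestRunner

class SmokeTest(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)

if __name__ == '__main__':
    suite = unittest.TestLoader().loadTestsFromTestCase(SmokeTest)
    # The runner defers reporting to nose's TextTestResult.
    result = TextTestRunner(verbosity=2).run(suite)
    print(result.wasSuccessful())
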
-
-
-class TestProgram(unittest.TestProgram):
- """Collect and run tests, returning success or failure.
-
- The arguments to TestProgram() are the same as to
- :func:`main()` and :func:`run()`:
-
- * module: All tests are in this module (default: None)
- * defaultTest: Tests to load (default: '.')
- * argv: Command line arguments (default: None; sys.argv is read)
- * testRunner: Test runner instance (default: None)
- * testLoader: Test loader instance (default: None)
- * env: Environment; ignored if config is provided (default: None;
- os.environ is read)
- * config: :class:`nose.config.Config` instance (default: None)
- * suite: Suite or list of tests to run (default: None). Passing a
- suite or lists of tests will bypass all test discovery and
- loading. *ALSO NOTE* that if you pass a unittest.TestSuite
- instance as the suite, context fixtures at the class, module and
- package level will not be used, and many plugin hooks will not
- be called. If you want normal nose behavior, either pass a list
- of tests, or a fully-configured :class:`nose.suite.ContextSuite`.
- * exit: Exit after running tests and printing report (default: True)
- * plugins: List of plugins to use; ignored if config is provided
- (default: load plugins with DefaultPluginManager)
- * addplugins: List of **extra** plugins to use. Pass a list of plugin
- instances in this argument to make custom plugins available while
- still using the DefaultPluginManager.
- """
- verbosity = 1
-
- def __init__(self, module=None, defaultTest='.', argv=None,
- testRunner=None, testLoader=None, env=None, config=None,
- suite=None, exit=True, plugins=None, addplugins=None):
- if env is None:
- env = os.environ
- if config is None:
- config = self.makeConfig(env, plugins)
- if addplugins:
- config.plugins.addPlugins(extraplugins=addplugins)
- self.config = config
- self.suite = suite
- self.exit = exit
- extra_args = {}
- version = sys.version_info[0:2]
- if version >= (2,7) and version != (3,0):
- extra_args['exit'] = exit
- unittest.TestProgram.__init__(
- self, module=module, defaultTest=defaultTest,
- argv=argv, testRunner=testRunner, testLoader=testLoader,
- **extra_args)
-
- def getAllConfigFiles(self, env=None):
- env = env or {}
- if env.get('NOSE_IGNORE_CONFIG_FILES', False):
- return []
- else:
- return all_config_files()
-
- def makeConfig(self, env, plugins=None):
- """Load a Config, pre-filled with user config files if any are
- found.
- """
- cfg_files = self.getAllConfigFiles(env)
- if plugins:
- manager = PluginManager(plugins=plugins)
- else:
- manager = DefaultPluginManager()
- return Config(
- env=env, files=cfg_files, plugins=manager)
-
- def parseArgs(self, argv):
- """Parse argv and env and configure running environment.
- """
- self.config.configure(argv, doc=self.usage())
- log.debug("configured %s", self.config)
-
- # quick outs: version, plugins (optparse would have already
- # caught and exited on help)
- if self.config.options.version:
- from nose import __version__
- sys.stdout = sys.__stdout__
- print "%s version %s" % (os.path.basename(sys.argv[0]), __version__)
- sys.exit(0)
-
- if self.config.options.showPlugins:
- self.showPlugins()
- sys.exit(0)
-
- if self.testLoader is None:
- self.testLoader = defaultTestLoader(config=self.config)
- elif isclass(self.testLoader):
- self.testLoader = self.testLoader(config=self.config)
- plug_loader = self.config.plugins.prepareTestLoader(self.testLoader)
- if plug_loader is not None:
- self.testLoader = plug_loader
- log.debug("test loader is %s", self.testLoader)
-
- # FIXME if self.module is a string, add it to self.testNames? not sure
-
- if self.config.testNames:
- self.testNames = self.config.testNames
- else:
- self.testNames = tolist(self.defaultTest)
- log.debug('defaultTest %s', self.defaultTest)
- log.debug('Test names are %s', self.testNames)
- if self.config.workingDir is not None:
- os.chdir(self.config.workingDir)
- self.createTests()
-
- def createTests(self):
- """Create the tests to run. If a self.suite
- is set, then that suite will be used. Otherwise, tests will be
- loaded from the given test names (self.testNames) using the
- test loader.
- """
- log.debug("createTests called with %s", self.suite)
- if self.suite is not None:
- # We were given an explicit suite to run. Make sure it's
- # loaded and wrapped correctly.
- self.test = self.testLoader.suiteClass(self.suite)
- else:
- self.test = self.testLoader.loadTestsFromNames(self.testNames)
-
- def runTests(self):
- """Run Tests. Returns true on success, false on failure, and sets
- self.success to the same value.
- """
- log.debug("runTests called")
- if self.testRunner is None:
- self.testRunner = TextTestRunner(stream=self.config.stream,
- verbosity=self.config.verbosity,
- config=self.config)
- plug_runner = self.config.plugins.prepareTestRunner(self.testRunner)
- if plug_runner is not None:
- self.testRunner = plug_runner
- result = self.testRunner.run(self.test)
- self.success = result.wasSuccessful()
- if self.exit:
- sys.exit(not self.success)
- return self.success
-
- def showPlugins(self):
- """Print list of available plugins.
- """
- import textwrap
-
- class DummyParser:
- def __init__(self):
- self.options = []
- def add_option(self, *arg, **kw):
- self.options.append((arg, kw.pop('help', '')))
-
- v = self.config.verbosity
- self.config.plugins.sort()
- for p in self.config.plugins:
- print "Plugin %s" % p.name
- if v >= 2:
- print " score: %s" % p.score
- print '\n'.join(textwrap.wrap(p.help().strip(),
- initial_indent=' ',
- subsequent_indent=' '))
- if v >= 3:
- parser = DummyParser()
- p.addOptions(parser)
- if len(parser.options):
- print
- print " Options:"
- for opts, help in parser.options:
- print ' %s' % (', '.join(opts))
- if help:
- print '\n'.join(
- textwrap.wrap(help.strip(),
- initial_indent=' ',
- subsequent_indent=' '))
- print
-
- def usage(cls):
- import nose
- try:
- ld = nose.__loader__
- text = ld.get_data(os.path.join(
- os.path.dirname(__file__), 'usage.txt'))
- except AttributeError:
- f = open(os.path.join(
- os.path.dirname(__file__), 'usage.txt'), 'r')
- try:
- text = f.read()
- finally:
- f.close()
- # Ensure that we return str, not bytes.
- if not isinstance(text, str):
- text = text.decode('utf-8')
- return text
- usage = classmethod(usage)
-
-# backwards compatibility
-run_exit = main = TestProgram
-
-
-def run(*arg, **kw):
- """Collect and run tests, returning success or failure.
-
- The arguments to `run()` are the same as to `main()`:
-
- * module: All tests are in this module (default: None)
- * defaultTest: Tests to load (default: '.')
- * argv: Command line arguments (default: None; sys.argv is read)
- * testRunner: Test runner instance (default: None)
- * testLoader: Test loader instance (default: None)
- * env: Environment; ignored if config is provided (default: None;
- os.environ is read)
- * config: :class:`nose.config.Config` instance (default: None)
- * suite: Suite or list of tests to run (default: None). Passing a
- suite or lists of tests will bypass all test discovery and
- loading. *ALSO NOTE* that if you pass a unittest.TestSuite
- instance as the suite, context fixtures at the class, module and
- package level will not be used, and many plugin hooks will not
- be called. If you want normal nose behavior, either pass a list
- of tests, or a fully-configured :class:`nose.suite.ContextSuite`.
- * plugins: List of plugins to use; ignored if config is provided
- (default: load plugins with DefaultPluginManager)
- * addplugins: List of **extra** plugins to use. Pass a list of plugin
- instances in this argument to make custom plugins available while
- still using the DefaultPluginManager.
-
- With the exception that the ``exit`` argument is always set
- to False.
- """
- kw['exit'] = False
- return TestProgram(*arg, **kw).success
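
A sketch of the programmatic entry point described above, assuming nose itself is still importable; the argv shown mirrors a plain "nosetests -v" invocation, with discovery starting from the working directory.

import sys
from nose.core import run

if __name__ == '__main__':
    # run() is main() with exit=False: it returns True on success instead
    # of calling sys.exit(), leaving the caller in control.
    ok = run(argv=['nosetests', '-v'])
    sys.exit(0 if ok else 1)
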
-
-
-def runmodule(name='__main__', **kw):
- """Collect and run tests in a single module only. Defaults to running
- tests in __main__. Additional arguments to TestProgram may be passed
- as keyword arguments.
- """
- main(defaultTest=name, **kw)
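
The self-running-module pattern this docstring describes might look like the following sketch; the module name test_math.py and its test are hypothetical, and nose must be installed.

# test_math.py (hypothetical)
from nose.core import runmodule

def test_addition():
    assert 1 + 1 == 2

if __name__ == '__main__':
    # Collect and run only the tests defined in this module.
    runmodule()
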
-
-
-def collector():
- """TestSuite replacement entry point. Use anywhere you might use a
- unittest.TestSuite. The collector will, by default, load options from
- all config files and execute loader.loadTestsFromNames() on the
- configured testNames, or '.' if no testNames are configured.
- """
- # plugins that implement any of these methods are disabled, since
- # we don't control the test runner and won't be able to run them
- # finalize() is also not called, but plugins that use it aren't disabled,
- # because capture needs it.
- setuptools_incompat = ('report', 'prepareTest',
- 'prepareTestLoader', 'prepareTestRunner',
- 'setOutputStream')
-
- plugins = RestrictedPluginManager(exclude=setuptools_incompat)
- conf = Config(files=all_config_files(),
- plugins=plugins)
- conf.configure(argv=['collector'])
- loader = defaultTestLoader(conf)
-
- if conf.testNames:
- suite = loader.loadTestsFromNames(conf.testNames)
- else:
- suite = loader.loadTestsFromNames(('.',))
- return FinalizingSuiteWrapper(suite, plugins.finalize)
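
A sketch of the setuptools hookup that collector() exists for; the project metadata below is made up, and it assumes setuptools and nose are both installed.

# setup.py (illustrative)
from setuptools import setup

setup(
    name='example-project',
    version='0.1',
    packages=['example'],
    # "python setup.py test" imports nose.collector and runs whatever it
    # discovers, honoring the usual nose config files.
    test_suite='nose.collector',
)
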
-
-
-
-if __name__ == '__main__':
- main()
diff --git a/lib/spack/external/nose/exc.py b/lib/spack/external/nose/exc.py
deleted file mode 100644
index 8b780db0d4..0000000000
--- a/lib/spack/external/nose/exc.py
+++ /dev/null
@@ -1,9 +0,0 @@
-"""Exceptions for marking tests as skipped or deprecated.
-
-This module exists to provide backwards compatibility with previous
-versions of nose where skipped and deprecated tests were core
-functionality, rather than being provided by plugins. It may be
-removed in a future release.
-"""
-from nose.plugins.skip import SkipTest
-from nose.plugins.deprecated import DeprecatedTest
diff --git a/lib/spack/external/nose/ext/__init__.py b/lib/spack/external/nose/ext/__init__.py
deleted file mode 100644
index 5fd1516a09..0000000000
--- a/lib/spack/external/nose/ext/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-"""
-External or vendor files
-"""
diff --git a/lib/spack/external/nose/ext/dtcompat.py b/lib/spack/external/nose/ext/dtcompat.py
deleted file mode 100644
index 332cf08c12..0000000000
--- a/lib/spack/external/nose/ext/dtcompat.py
+++ /dev/null
@@ -1,2272 +0,0 @@
-# Module doctest.
-# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
-# Major enhancements and refactoring by:
-# Jim Fulton
-# Edward Loper
-
-# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
-#
-# Modified for inclusion in nose to provide support for DocFileTest in
-# python 2.3:
-#
-# - all doctests removed from module (they fail under 2.3 and 2.5)
-# - now handles the $py.class extension when ran under Jython
-
-r"""Module doctest -- a framework for running examples in docstrings.
-
-In simplest use, end each module M to be tested with:
-
-def _test():
- import doctest
- doctest.testmod()
-
-if __name__ == "__main__":
- _test()
-
-Then running the module as a script will cause the examples in the
-docstrings to get executed and verified:
-
-python M.py
-
-This won't display anything unless an example fails, in which case the
-failing example(s) and the cause(s) of the failure(s) are printed to stdout
-(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
-line of output is "Test failed.".
-
-Run it with the -v switch instead:
-
-python M.py -v
-
-and a detailed report of all examples tried is printed to stdout, along
-with assorted summaries at the end.
-
-You can force verbose mode by passing "verbose=True" to testmod, or prohibit
-it by passing "verbose=False". In either of those cases, sys.argv is not
-examined by testmod.
-
-There are a variety of other ways to run doctests, including integration
-with the unittest framework, and support for running non-Python text
-files containing doctests. There are also many ways to override parts
-of doctest's default behaviors. See the Library Reference Manual for
-details.
-"""
-
-__docformat__ = 'reStructuredText en'
-
-__all__ = [
- # 0, Option Flags
- 'register_optionflag',
- 'DONT_ACCEPT_TRUE_FOR_1',
- 'DONT_ACCEPT_BLANKLINE',
- 'NORMALIZE_WHITESPACE',
- 'ELLIPSIS',
- 'IGNORE_EXCEPTION_DETAIL',
- 'COMPARISON_FLAGS',
- 'REPORT_UDIFF',
- 'REPORT_CDIFF',
- 'REPORT_NDIFF',
- 'REPORT_ONLY_FIRST_FAILURE',
- 'REPORTING_FLAGS',
- # 1. Utility Functions
- 'is_private',
- # 2. Example & DocTest
- 'Example',
- 'DocTest',
- # 3. Doctest Parser
- 'DocTestParser',
- # 4. Doctest Finder
- 'DocTestFinder',
- # 5. Doctest Runner
- 'DocTestRunner',
- 'OutputChecker',
- 'DocTestFailure',
- 'UnexpectedException',
- 'DebugRunner',
- # 6. Test Functions
- 'testmod',
- 'testfile',
- 'run_docstring_examples',
- # 7. Tester
- 'Tester',
- # 8. Unittest Support
- 'DocTestSuite',
- 'DocFileSuite',
- 'set_unittest_reportflags',
- # 9. Debugging Support
- 'script_from_examples',
- 'testsource',
- 'debug_src',
- 'debug',
-]
-
-import __future__
-
-import sys, traceback, inspect, linecache, os, re
-import unittest, difflib, pdb, tempfile
-import warnings
-from StringIO import StringIO
-
-# Don't whine about the deprecated is_private function in this
-# module's tests.
-warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
- __name__, 0)
-
-# There are 4 basic classes:
-# - Example: a <source, want> pair, plus an intra-docstring line number.
-# - DocTest: a collection of examples, parsed from a docstring, plus
-# info about where the docstring came from (name, filename, lineno).
-# - DocTestFinder: extracts DocTests from a given object's docstring and
-# its contained objects' docstrings.
-# - DocTestRunner: runs DocTest cases, and accumulates statistics.
-#
-# So the basic picture is:
-#
-# list of:
-# +------+ +---------+ +-------+
-# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
-# +------+ +---------+ +-------+
-# | Example |
-# | ... |
-# | Example |
-# +---------+
-
-# Option constants.
-
-OPTIONFLAGS_BY_NAME = {}
-def register_optionflag(name):
- # Create a new flag unless `name` is already known.
- return OPTIONFLAGS_BY_NAME.setdefault(name, 1 << len(OPTIONFLAGS_BY_NAME))
-
-DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
-DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
-NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
-ELLIPSIS = register_optionflag('ELLIPSIS')
-IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
-
-COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
- DONT_ACCEPT_BLANKLINE |
- NORMALIZE_WHITESPACE |
- ELLIPSIS |
- IGNORE_EXCEPTION_DETAIL)
-
-REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
-REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
-REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
-REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
-
-REPORTING_FLAGS = (REPORT_UDIFF |
- REPORT_CDIFF |
- REPORT_NDIFF |
- REPORT_ONLY_FIRST_FAILURE)
-
-# Special string markers for use in `want` strings:
-BLANKLINE_MARKER = '<BLANKLINE>'
-ELLIPSIS_MARKER = '...'
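
A sketch of combining these option flags, using the standard-library doctest module that this vendored copy mirrors; the sample want/got strings are arbitrary.

import doctest

checker = doctest.OutputChecker()
flags = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE
# ELLIPSIS lets '...' stand in for arbitrary text; NORMALIZE_WHITESPACE
# collapses whitespace runs before comparing.
print(checker.check_output('spam ... eggs\n', 'spam  and   eggs\n', flags))  # True
print(checker.check_output('spam ... eggs\n', 'ham and eggs\n', flags))      # False
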
-
-######################################################################
-## Table of Contents
-######################################################################
-# 1. Utility Functions
-# 2. Example & DocTest -- store test cases
-# 3. DocTest Parser -- extracts examples from strings
-# 4. DocTest Finder -- extracts test cases from objects
-# 5. DocTest Runner -- runs test cases
-# 6. Test Functions -- convenient wrappers for testing
-# 7. Tester Class -- for backwards compatibility
-# 8. Unittest Support
-# 9. Debugging Support
-# 10. Example Usage
-
-######################################################################
-## 1. Utility Functions
-######################################################################
-
-def is_private(prefix, base):
- """prefix, base -> true iff name prefix + "." + base is "private".
-
- Prefix may be an empty string, and base does not contain a period.
- Prefix is ignored (although functions you write conforming to this
- protocol may make use of it).
- Return true iff base begins with an (at least one) underscore, but
- does not both begin and end with (at least) two underscores.
- """
- warnings.warn("is_private is deprecated; it wasn't useful; "
- "examine DocTestFinder.find() lists instead",
- DeprecationWarning, stacklevel=2)
- return base[:1] == "_" and not base[:2] == "__" == base[-2:]
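
A standalone sketch of the naming rule above (the real function is deprecated and only adds a warning before applying the same test); looks_private is a made-up name.

def looks_private(base):
    # Leading underscore, but not a dunder name like __init__.
    return base[:1] == "_" and not base[:2] == "__" == base[-2:]

print(looks_private("_helper"))   # True
print(looks_private("__init__"))  # False
print(looks_private("public"))    # False
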
-
-def _extract_future_flags(globs):
- """
- Return the compiler-flags associated with the future features that
- have been imported into the given namespace (globs).
- """
- flags = 0
- for fname in __future__.all_feature_names:
- feature = globs.get(fname, None)
- if feature is getattr(__future__, fname):
- flags |= feature.compiler_flag
- return flags
-
-def _normalize_module(module, depth=2):
- """
- Return the module specified by `module`. In particular:
- - If `module` is a module, then return module.
- - If `module` is a string, then import and return the
- module with that name.
- - If `module` is None, then return the calling module.
- The calling module is assumed to be the module of
- the stack frame at the given depth in the call stack.
- """
- if inspect.ismodule(module):
- return module
- elif isinstance(module, (str, unicode)):
- return __import__(module, globals(), locals(), ["*"])
- elif module is None:
- return sys.modules[sys._getframe(depth).f_globals['__name__']]
- else:
- raise TypeError("Expected a module, string, or None")
-
-def _indent(s, indent=4):
- """
- Add the given number of space characters to the beginning every
- non-blank line in `s`, and return the result.
- """
- # This regexp matches the start of non-blank lines:
- return re.sub('(?m)^(?!$)', indent*' ', s)
-
-def _exception_traceback(exc_info):
- """
- Return a string containing a traceback message for the given
- exc_info tuple (as returned by sys.exc_info()).
- """
- # Get a traceback message.
- excout = StringIO()
- exc_type, exc_val, exc_tb = exc_info
- traceback.print_exception(exc_type, exc_val, exc_tb, file=excout)
- return excout.getvalue()
-
-# Override some StringIO methods.
-class _SpoofOut(StringIO):
- def getvalue(self):
- result = StringIO.getvalue(self)
- # If anything at all was written, make sure there's a trailing
- # newline. There's no way for the expected output to indicate
- # that a trailing newline is missing.
- if result and not result.endswith("\n"):
- result += "\n"
- # Prevent softspace from screwing up the next test case, in
- # case they used print with a trailing comma in an example.
- if hasattr(self, "softspace"):
- del self.softspace
- return result
-
- def truncate(self, size=None):
- StringIO.truncate(self, size)
- if hasattr(self, "softspace"):
- del self.softspace
-
-# Worst-case linear-time ellipsis matching.
-def _ellipsis_match(want, got):
- if ELLIPSIS_MARKER not in want:
- return want == got
-
- # Find "the real" strings.
- ws = want.split(ELLIPSIS_MARKER)
- assert len(ws) >= 2
-
- # Deal with exact matches possibly needed at one or both ends.
- startpos, endpos = 0, len(got)
- w = ws[0]
- if w: # starts with exact match
- if got.startswith(w):
- startpos = len(w)
- del ws[0]
- else:
- return False
- w = ws[-1]
- if w: # ends with exact match
- if got.endswith(w):
- endpos -= len(w)
- del ws[-1]
- else:
- return False
-
- if startpos > endpos:
- # Exact end matches required more characters than we have, as in
- # _ellipsis_match('aa...aa', 'aaa')
- return False
-
- # For the rest, we only need to find the leftmost non-overlapping
- # match for each piece. If there's no overall match that way alone,
- # there's no overall match period.
- for w in ws:
- # w may be '' at times, if there are consecutive ellipses, or
- # due to an ellipsis at the start or end of `want`. That's OK.
- # Search for an empty string succeeds, and doesn't change startpos.
- startpos = got.find(w, startpos, endpos)
- if startpos < 0:
- return False
- startpos += len(w)
-
- return True
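
A sketch of the matching rule implemented above, exercised through the stdlib doctest ELLIPSIS flag, which uses an equivalent matcher; the second call shows the overlapping-ends case mentioned in the comments.

import doctest

checker = doctest.OutputChecker()
print(checker.check_output('aa...aa\n', 'aaaa\n', doctest.ELLIPSIS))  # True
# The literal ends need 4 characters but only 3 are available:
print(checker.check_output('aa...aa\n', 'aaa\n', doctest.ELLIPSIS))   # False
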
-
-def _comment_line(line):
- "Return a commented form of the given line"
- line = line.rstrip()
- if line:
- return '# '+line
- else:
- return '#'
-
-class _OutputRedirectingPdb(pdb.Pdb):
- """
- A specialized version of the python debugger that redirects stdout
- to a given stream when interacting with the user. Stdout is *not*
- redirected when traced code is executed.
- """
- def __init__(self, out):
- self.__out = out
- pdb.Pdb.__init__(self)
-
- def trace_dispatch(self, *args):
- # Redirect stdout to the given stream.
- save_stdout = sys.stdout
- sys.stdout = self.__out
- # Call Pdb's trace dispatch method.
- try:
- return pdb.Pdb.trace_dispatch(self, *args)
- finally:
- sys.stdout = save_stdout
-
-# [XX] Normalize with respect to os.path.pardir?
-def _module_relative_path(module, path):
- if not inspect.ismodule(module):
- raise TypeError, 'Expected a module: %r' % module
- if path.startswith('/'):
- raise ValueError, 'Module-relative files may not have absolute paths'
-
- # Find the base directory for the path.
- if hasattr(module, '__file__'):
- # A normal module/package
- basedir = os.path.split(module.__file__)[0]
- elif module.__name__ == '__main__':
- # An interactive session.
- if len(sys.argv)>0 and sys.argv[0] != '':
- basedir = os.path.split(sys.argv[0])[0]
- else:
- basedir = os.curdir
- else:
- # A module w/o __file__ (this includes builtins)
- raise ValueError("Can't resolve paths relative to the module " +
- module + " (it has no __file__)")
-
- # Combine the base directory and the path.
- return os.path.join(basedir, *(path.split('/')))
-
-######################################################################
-## 2. Example & DocTest
-######################################################################
-## - An "example" is a <source, want> pair, where "source" is a
-## fragment of source code, and "want" is the expected output for
-## "source." The Example class also includes information about
-## where the example was extracted from.
-##
-## - A "doctest" is a collection of examples, typically extracted from
-## a string (such as an object's docstring). The DocTest class also
-## includes information about where the string was extracted from.
-
-class Example:
- """
- A single doctest example, consisting of source code and expected
- output. `Example` defines the following attributes:
-
- - source: A single Python statement, always ending with a newline.
- The constructor adds a newline if needed.
-
- - want: The expected output from running the source code (either
- from stdout, or a traceback in case of exception). `want` ends
- with a newline unless it's empty, in which case it's an empty
- string. The constructor adds a newline if needed.
-
- - exc_msg: The exception message generated by the example, if
- the example is expected to generate an exception; or `None` if
- it is not expected to generate an exception. This exception
- message is compared against the return value of
- `traceback.format_exception_only()`. `exc_msg` ends with a
- newline unless it's `None`. The constructor adds a newline
- if needed.
-
- - lineno: The line number within the DocTest string containing
- this Example where the Example begins. This line number is
- zero-based, with respect to the beginning of the DocTest.
-
- - indent: The example's indentation in the DocTest string.
-      I.e., the number of space characters that precede the
- example's first prompt.
-
- - options: A dictionary mapping from option flags to True or
- False, which is used to override default options for this
- example. Any option flags not contained in this dictionary
- are left at their default value (as specified by the
- DocTestRunner's optionflags). By default, no options are set.
- """
- def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
- options=None):
- # Normalize inputs.
- if not source.endswith('\n'):
- source += '\n'
- if want and not want.endswith('\n'):
- want += '\n'
- if exc_msg is not None and not exc_msg.endswith('\n'):
- exc_msg += '\n'
- # Store properties.
- self.source = source
- self.want = want
- self.lineno = lineno
- self.indent = indent
- if options is None: options = {}
- self.options = options
- self.exc_msg = exc_msg
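
A sketch of building an Example by hand via the stdlib doctest module (same constructor as this copy), mainly to show the trailing-newline normalization.

import doctest

ex = doctest.Example(source="print(1 + 1)", want="2")
print(repr(ex.source))   # 'print(1 + 1)\n' -- newline added by the constructor
print(repr(ex.want))     # '2\n'
print(ex.options)        # {} -- no per-example option overrides
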
-
-class DocTest:
- """
- A collection of doctest examples that should be run in a single
- namespace. Each `DocTest` defines the following attributes:
-
- - examples: the list of examples.
-
- - globs: The namespace (aka globals) that the examples should
- be run in.
-
- - name: A name identifying the DocTest (typically, the name of
- the object whose docstring this DocTest was extracted from).
-
- - filename: The name of the file that this DocTest was extracted
- from, or `None` if the filename is unknown.
-
- - lineno: The line number within filename where this DocTest
- begins, or `None` if the line number is unavailable. This
- line number is zero-based, with respect to the beginning of
- the file.
-
- - docstring: The string that the examples were extracted from,
- or `None` if the string is unavailable.
- """
- def __init__(self, examples, globs, name, filename, lineno, docstring):
- """
- Create a new DocTest containing the given examples. The
- DocTest's globals are initialized with a copy of `globs`.
- """
- assert not isinstance(examples, basestring), \
- "DocTest no longer accepts str; use DocTestParser instead"
- self.examples = examples
- self.docstring = docstring
- self.globs = globs.copy()
- self.name = name
- self.filename = filename
- self.lineno = lineno
-
- def __repr__(self):
- if len(self.examples) == 0:
- examples = 'no examples'
- elif len(self.examples) == 1:
- examples = '1 example'
- else:
- examples = '%d examples' % len(self.examples)
- return ('<DocTest %s from %s:%s (%s)>' %
- (self.name, self.filename, self.lineno, examples))
-
-
- # This lets us sort tests by name:
- def __cmp__(self, other):
- if not isinstance(other, DocTest):
- return -1
- return cmp((self.name, self.filename, self.lineno, id(self)),
- (other.name, other.filename, other.lineno, id(other)))
-
-######################################################################
-## 3. DocTestParser
-######################################################################
-
-class DocTestParser:
- """
- A class used to parse strings containing doctest examples.
- """
- # This regular expression is used to find doctest examples in a
- # string. It defines three groups: `source` is the source code
- # (including leading indentation and prompts); `indent` is the
- # indentation of the first (PS1) line of the source code; and
- # `want` is the expected output (including leading indentation).
- _EXAMPLE_RE = re.compile(r'''
- # Source consists of a PS1 line followed by zero or more PS2 lines.
- (?P<source>
- (?:^(?P<indent> [ ]*) >>> .*) # PS1 line
- (?:\n [ ]* \.\.\. .*)*) # PS2 lines
- \n?
- # Want consists of any non-blank lines that do not start with PS1.
- (?P<want> (?:(?![ ]*$) # Not a blank line
- (?![ ]*>>>) # Not a line starting with PS1
- .*$\n? # But any other line
- )*)
- ''', re.MULTILINE | re.VERBOSE)
-
- # A regular expression for handling `want` strings that contain
- # expected exceptions. It divides `want` into three pieces:
- # - the traceback header line (`hdr`)
- # - the traceback stack (`stack`)
- # - the exception message (`msg`), as generated by
- # traceback.format_exception_only()
- # `msg` may have multiple lines. We assume/require that the
- # exception message is the first non-indented line starting with a word
- # character following the traceback header line.
- _EXCEPTION_RE = re.compile(r"""
- # Grab the traceback header. Different versions of Python have
- # said different things on the first traceback line.
- ^(?P<hdr> Traceback\ \(
- (?: most\ recent\ call\ last
- | innermost\ last
- ) \) :
- )
- \s* $ # toss trailing whitespace on the header.
- (?P<stack> .*?) # don't blink: absorb stuff until...
- ^ (?P<msg> \w+ .*) # a line *starts* with alphanum.
- """, re.VERBOSE | re.MULTILINE | re.DOTALL)
-
- # A callable returning a true value iff its argument is a blank line
- # or contains a single comment.
- _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
-
- def parse(self, string, name='<string>'):
- """
- Divide the given string into examples and intervening text,
- and return them as a list of alternating Examples and strings.
- Line numbers for the Examples are 0-based. The optional
- argument `name` is a name identifying this string, and is only
- used for error messages.
- """
- string = string.expandtabs()
- # If all lines begin with the same indentation, then strip it.
- min_indent = self._min_indent(string)
- if min_indent > 0:
- string = '\n'.join([l[min_indent:] for l in string.split('\n')])
-
- output = []
- charno, lineno = 0, 0
- # Find all doctest examples in the string:
- for m in self._EXAMPLE_RE.finditer(string):
- # Add the pre-example text to `output`.
- output.append(string[charno:m.start()])
- # Update lineno (lines before this example)
- lineno += string.count('\n', charno, m.start())
- # Extract info from the regexp match.
- (source, options, want, exc_msg) = \
- self._parse_example(m, name, lineno)
- # Create an Example, and add it to the list.
- if not self._IS_BLANK_OR_COMMENT(source):
- output.append( Example(source, want, exc_msg,
- lineno=lineno,
- indent=min_indent+len(m.group('indent')),
- options=options) )
- # Update lineno (lines inside this example)
- lineno += string.count('\n', m.start(), m.end())
- # Update charno.
- charno = m.end()
- # Add any remaining post-example text to `output`.
- output.append(string[charno:])
- return output
-
- def get_doctest(self, string, globs, name, filename, lineno):
- """
- Extract all doctest examples from the given string, and
- collect them into a `DocTest` object.
-
- `globs`, `name`, `filename`, and `lineno` are attributes for
- the new `DocTest` object. See the documentation for `DocTest`
- for more information.
- """
- return DocTest(self.get_examples(string, name), globs,
- name, filename, lineno, string)
-
- def get_examples(self, string, name='<string>'):
- """
- Extract all doctest examples from the given string, and return
- them as a list of `Example` objects. Line numbers are
- 0-based, because it's most common in doctests that nothing
- interesting appears on the same line as opening triple-quote,
- and so the first interesting line is called \"line 1\" then.
-
- The optional argument `name` is a name identifying this
- string, and is only used for error messages.
- """
- return [x for x in self.parse(string, name)
- if isinstance(x, Example)]
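
A sketch of parse()/get_examples() in use, via the stdlib DocTestParser, which has the same interface as this copy; the docstring being parsed is made up.

import doctest

DOC = """
Adds numbers:

    >>> 1 + 1
    2
    >>> print('ok')
    ok
"""

parser = doctest.DocTestParser()
for example in parser.get_examples(DOC):
    print('line %d: %s -> %s' % (example.lineno,
                                 example.source.strip(),
                                 example.want.strip()))
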
-
- def _parse_example(self, m, name, lineno):
- """
- Given a regular expression match from `_EXAMPLE_RE` (`m`),
- return a pair `(source, want)`, where `source` is the matched
- example's source code (with prompts and indentation stripped);
- and `want` is the example's expected output (with indentation
- stripped).
-
- `name` is the string's name, and `lineno` is the line number
- where the example starts; both are used for error messages.
- """
- # Get the example's indentation level.
- indent = len(m.group('indent'))
-
- # Divide source into lines; check that they're properly
- # indented; and then strip their indentation & prompts.
- source_lines = m.group('source').split('\n')
- self._check_prompt_blank(source_lines, indent, name, lineno)
- self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
- source = '\n'.join([sl[indent+4:] for sl in source_lines])
-
- # Divide want into lines; check that it's properly indented; and
- # then strip the indentation. Spaces before the last newline should
- # be preserved, so plain rstrip() isn't good enough.
- want = m.group('want')
- want_lines = want.split('\n')
- if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
- del want_lines[-1] # forget final newline & spaces after it
- self._check_prefix(want_lines, ' '*indent, name,
- lineno + len(source_lines))
- want = '\n'.join([wl[indent:] for wl in want_lines])
-
- # If `want` contains a traceback message, then extract it.
- m = self._EXCEPTION_RE.match(want)
- if m:
- exc_msg = m.group('msg')
- else:
- exc_msg = None
-
- # Extract options from the source.
- options = self._find_options(source, name, lineno)
-
- return source, options, want, exc_msg
-
- # This regular expression looks for option directives in the
- # source code of an example. Option directives are comments
- # starting with "doctest:". Warning: this may give false
- # positives for string-literals that contain the string
- # "#doctest:". Eliminating these false positives would require
- # actually parsing the string; but we limit them by ignoring any
- # line containing "#doctest:" that is *followed* by a quote mark.
- _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
- re.MULTILINE)
-
- def _find_options(self, source, name, lineno):
- """
- Return a dictionary containing option overrides extracted from
- option directives in the given source string.
-
- `name` is the string's name, and `lineno` is the line number
- where the example starts; both are used for error messages.
- """
- options = {}
- # (note: with the current regexp, this will match at most once:)
- for m in self._OPTION_DIRECTIVE_RE.finditer(source):
- option_strings = m.group(1).replace(',', ' ').split()
- for option in option_strings:
- if (option[0] not in '+-' or
- option[1:] not in OPTIONFLAGS_BY_NAME):
- raise ValueError('line %r of the doctest for %s '
- 'has an invalid option: %r' %
- (lineno+1, name, option))
- flag = OPTIONFLAGS_BY_NAME[option[1:]]
- options[flag] = (option[0] == '+')
- if options and self._IS_BLANK_OR_COMMENT(source):
- raise ValueError('line %r of the doctest for %s has an option '
- 'directive on a line with no example: %r' %
- (lineno, name, source))
- return options
-
- # This regular expression finds the indentation of every non-blank
- # line in a string.
- _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)
-
- def _min_indent(self, s):
- "Return the minimum indentation of any non-blank line in `s`"
- indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
- if len(indents) > 0:
- return min(indents)
- else:
- return 0
-
- def _check_prompt_blank(self, lines, indent, name, lineno):
- """
- Given the lines of a source string (including prompts and
- leading indentation), check to make sure that every prompt is
- followed by a space character. If any line is not followed by
- a space character, then raise ValueError.
- """
- for i, line in enumerate(lines):
- if len(line) >= indent+4 and line[indent+3] != ' ':
- raise ValueError('line %r of the docstring for %s '
- 'lacks blank after %s: %r' %
- (lineno+i+1, name,
- line[indent:indent+3], line))
-
- def _check_prefix(self, lines, prefix, name, lineno):
- """
- Check that every line in the given list starts with the given
- prefix; if any line does not, then raise a ValueError.
- """
- for i, line in enumerate(lines):
- if line and not line.startswith(prefix):
- raise ValueError('line %r of the docstring for %s has '
- 'inconsistent leading whitespace: %r' %
- (lineno+i+1, name, line))
-
-
-######################################################################
-## 4. DocTest Finder
-######################################################################
-
-class DocTestFinder:
- """
- A class used to extract the DocTests that are relevant to a given
- object, from its docstring and the docstrings of its contained
- objects. Doctests can currently be extracted from the following
- object types: modules, functions, classes, methods, staticmethods,
- classmethods, and properties.
- """
-
- def __init__(self, verbose=False, parser=DocTestParser(),
- recurse=True, _namefilter=None, exclude_empty=True):
- """
- Create a new doctest finder.
-
- The optional argument `parser` specifies a class or
- function that should be used to create new DocTest objects (or
- objects that implement the same interface as DocTest). The
- signature for this factory function should match the signature
- of the DocTest constructor.
-
- If the optional argument `recurse` is false, then `find` will
- only examine the given object, and not any contained objects.
-
- If the optional argument `exclude_empty` is false, then `find`
- will include tests for objects with empty docstrings.
- """
- self._parser = parser
- self._verbose = verbose
- self._recurse = recurse
- self._exclude_empty = exclude_empty
- # _namefilter is undocumented, and exists only for temporary backward-
- # compatibility support of testmod's deprecated isprivate mess.
- self._namefilter = _namefilter
-
- def find(self, obj, name=None, module=None, globs=None,
- extraglobs=None):
- """
- Return a list of the DocTests that are defined by the given
- object's docstring, or by any of its contained objects'
- docstrings.
-
- The optional parameter `module` is the module that contains
- the given object. If the module is not specified or is None, then
- the test finder will attempt to automatically determine the
- correct module. The object's module is used:
-
- - As a default namespace, if `globs` is not specified.
- - To prevent the DocTestFinder from extracting DocTests
- from objects that are imported from other modules.
- - To find the name of the file containing the object.
- - To help find the line number of the object within its
- file.
-
- Contained objects whose module does not match `module` are ignored.
-
- If `module` is False, no attempt to find the module will be made.
- This is obscure, of use mostly in tests: if `module` is False, or
- is None but cannot be found automatically, then all objects are
- considered to belong to the (non-existent) module, so all contained
- objects will (recursively) be searched for doctests.
-
- The globals for each DocTest is formed by combining `globs`
- and `extraglobs` (bindings in `extraglobs` override bindings
- in `globs`). A new copy of the globals dictionary is created
- for each DocTest. If `globs` is not specified, then it
- defaults to the module's `__dict__`, if specified, or {}
- otherwise. If `extraglobs` is not specified, then it defaults
- to {}.
-
- """
- # If name was not specified, then extract it from the object.
- if name is None:
- name = getattr(obj, '__name__', None)
- if name is None:
- raise ValueError("DocTestFinder.find: name must be given "
- "when obj.__name__ doesn't exist: %r" %
- (type(obj),))
-
- # Find the module that contains the given object (if obj is
- # a module, then module=obj.). Note: this may fail, in which
- # case module will be None.
- if module is False:
- module = None
- elif module is None:
- module = inspect.getmodule(obj)
-
- # Read the module's source code. This is used by
- # DocTestFinder._find_lineno to find the line number for a
- # given object's docstring.
- try:
- file = inspect.getsourcefile(obj) or inspect.getfile(obj)
- source_lines = linecache.getlines(file)
- if not source_lines:
- source_lines = None
- except TypeError:
- source_lines = None
-
- # Initialize globals, and merge in extraglobs.
- if globs is None:
- if module is None:
- globs = {}
- else:
- globs = module.__dict__.copy()
- else:
- globs = globs.copy()
- if extraglobs is not None:
- globs.update(extraglobs)
-
-        # Recursively explore `obj`, extracting DocTests.
- tests = []
- self._find(tests, obj, name, module, source_lines, globs, {})
- # Sort the tests by alpha order of names, for consistency in
- # verbose-mode output. This was a feature of doctest in Pythons
- # <= 2.3 that got lost by accident in 2.4. It was repaired in
- # 2.4.4 and 2.5.
- tests.sort()
- return tests
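
A sketch of find() in use, via the stdlib DocTestFinder (same interface as this copy); the square() function is a made-up example subject.

import doctest

def square(x):
    """Return x squared.

    >>> square(3)
    9
    """
    return x * x

finder = doctest.DocTestFinder()
for test in finder.find(square):
    print('%s: %d example(s)' % (test.name, len(test.examples)))
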
-
- def _filter(self, obj, prefix, base):
- """
- Return true if the given object should not be examined.
- """
- return (self._namefilter is not None and
- self._namefilter(prefix, base))
-
- def _from_module(self, module, object):
- """
- Return true if the given object is defined in the given
- module.
- """
- if module is None:
- return True
- elif inspect.isfunction(object):
- return module.__dict__ is object.func_globals
- elif inspect.isclass(object):
- # Some jython classes don't set __module__
- return module.__name__ == getattr(object, '__module__', None)
- elif inspect.getmodule(object) is not None:
- return module is inspect.getmodule(object)
- elif hasattr(object, '__module__'):
- return module.__name__ == object.__module__
- elif isinstance(object, property):
-            return True # [XX] no way to be sure.
- else:
- raise ValueError("object must be a class or function")
-
- def _find(self, tests, obj, name, module, source_lines, globs, seen):
- """
- Find tests for the given object and any contained objects, and
- add them to `tests`.
- """
- if self._verbose:
- print 'Finding tests in %s' % name
-
- # If we've already processed this object, then ignore it.
- if id(obj) in seen:
- return
- seen[id(obj)] = 1
-
- # Find a test for this object, and add it to the list of tests.
- test = self._get_test(obj, name, module, globs, source_lines)
- if test is not None:
- tests.append(test)
-
- # Look for tests in a module's contained objects.
- if inspect.ismodule(obj) and self._recurse:
- for valname, val in obj.__dict__.items():
- # Check if this contained object should be ignored.
- if self._filter(val, name, valname):
- continue
- valname = '%s.%s' % (name, valname)
- # Recurse to functions & classes.
- if ((inspect.isfunction(val) or inspect.isclass(val)) and
- self._from_module(module, val)):
- self._find(tests, val, valname, module, source_lines,
- globs, seen)
-
- # Look for tests in a module's __test__ dictionary.
- if inspect.ismodule(obj) and self._recurse:
- for valname, val in getattr(obj, '__test__', {}).items():
- if not isinstance(valname, basestring):
- raise ValueError("DocTestFinder.find: __test__ keys "
- "must be strings: %r" %
- (type(valname),))
- if not (inspect.isfunction(val) or inspect.isclass(val) or
- inspect.ismethod(val) or inspect.ismodule(val) or
- isinstance(val, basestring)):
- raise ValueError("DocTestFinder.find: __test__ values "
- "must be strings, functions, methods, "
- "classes, or modules: %r" %
- (type(val),))
- valname = '%s.__test__.%s' % (name, valname)
- self._find(tests, val, valname, module, source_lines,
- globs, seen)
-
- # Look for tests in a class's contained objects.
- if inspect.isclass(obj) and self._recurse:
- for valname, val in obj.__dict__.items():
- # Check if this contained object should be ignored.
- if self._filter(val, name, valname):
- continue
- # Special handling for staticmethod/classmethod.
- if isinstance(val, staticmethod):
- val = getattr(obj, valname)
- if isinstance(val, classmethod):
- val = getattr(obj, valname).im_func
-
- # Recurse to methods, properties, and nested classes.
- if ((inspect.isfunction(val) or inspect.isclass(val) or
- isinstance(val, property)) and
- self._from_module(module, val)):
- valname = '%s.%s' % (name, valname)
- self._find(tests, val, valname, module, source_lines,
- globs, seen)
-
- def _get_test(self, obj, name, module, globs, source_lines):
- """
- Return a DocTest for the given object, if it defines a docstring;
- otherwise, return None.
- """
- # Extract the object's docstring. If it doesn't have one,
- # then return None (no test for this object).
- if isinstance(obj, basestring):
- docstring = obj
- else:
- try:
- if obj.__doc__ is None:
- docstring = ''
- else:
- docstring = obj.__doc__
- if not isinstance(docstring, basestring):
- docstring = str(docstring)
- except (TypeError, AttributeError):
- docstring = ''
-
- # Find the docstring's location in the file.
- lineno = self._find_lineno(obj, source_lines)
-
- # Don't bother if the docstring is empty.
- if self._exclude_empty and not docstring:
- return None
-
- # Return a DocTest for this object.
- if module is None:
- filename = None
- else:
- filename = getattr(module, '__file__', module.__name__)
- if filename[-4:] in (".pyc", ".pyo"):
- filename = filename[:-1]
- elif sys.platform.startswith('java') and \
- filename.endswith('$py.class'):
- filename = '%s.py' % filename[:-9]
- return self._parser.get_doctest(docstring, globs, name,
- filename, lineno)
-
- def _find_lineno(self, obj, source_lines):
- """
- Return a line number of the given object's docstring. Note:
- this method assumes that the object has a docstring.
- """
- lineno = None
-
- # Find the line number for modules.
- if inspect.ismodule(obj):
- lineno = 0
-
- # Find the line number for classes.
- # Note: this could be fooled if a class is defined multiple
- # times in a single file.
- if inspect.isclass(obj):
- if source_lines is None:
- return None
- pat = re.compile(r'^\s*class\s*%s\b' %
- getattr(obj, '__name__', '-'))
- for i, line in enumerate(source_lines):
- if pat.match(line):
- lineno = i
- break
-
- # Find the line number for functions & methods.
- if inspect.ismethod(obj): obj = obj.im_func
- if inspect.isfunction(obj): obj = obj.func_code
- if inspect.istraceback(obj): obj = obj.tb_frame
- if inspect.isframe(obj): obj = obj.f_code
- if inspect.iscode(obj):
- lineno = getattr(obj, 'co_firstlineno', None)-1
-
- # Find the line number where the docstring starts. Assume
- # that it's the first line that begins with a quote mark.
- # Note: this could be fooled by a multiline function
- # signature, where a continuation line begins with a quote
- # mark.
- if lineno is not None:
- if source_lines is None:
- return lineno+1
- pat = re.compile('(^|.*:)\s*\w*("|\')')
- for lineno in range(lineno, len(source_lines)):
- if pat.match(source_lines[lineno]):
- return lineno
-
- # We couldn't find the line number.
- return None
-
-######################################################################
-## 5. DocTest Runner
-######################################################################
-
-class DocTestRunner:
- # This divider string is used to separate failure messages, and to
- # separate sections of the summary.
- DIVIDER = "*" * 70
-
- def __init__(self, checker=None, verbose=None, optionflags=0):
- """
- Create a new test runner.
-
- Optional keyword arg `checker` is the `OutputChecker` that
- should be used to compare the expected outputs and actual
- outputs of doctest examples.
-
- Optional keyword arg 'verbose' prints lots of stuff if true,
- only failures if false; by default, it's true iff '-v' is in
- sys.argv.
-
- Optional argument `optionflags` can be used to control how the
- test runner compares expected output to actual output, and how
- it displays failures. See the documentation for `testmod` for
- more information.
- """
- self._checker = checker or OutputChecker()
- if verbose is None:
- verbose = '-v' in sys.argv
- self._verbose = verbose
- self.optionflags = optionflags
- self.original_optionflags = optionflags
-
- # Keep track of the examples we've run.
- self.tries = 0
- self.failures = 0
- self._name2ft = {}
-
- # Create a fake output target for capturing doctest output.
- self._fakeout = _SpoofOut()
-
- #/////////////////////////////////////////////////////////////////
- # Reporting methods
- #/////////////////////////////////////////////////////////////////
-
- def report_start(self, out, test, example):
- """
- Report that the test runner is about to process the given
- example. (Only displays a message if verbose=True)
- """
- if self._verbose:
- if example.want:
- out('Trying:\n' + _indent(example.source) +
- 'Expecting:\n' + _indent(example.want))
- else:
- out('Trying:\n' + _indent(example.source) +
- 'Expecting nothing\n')
-
- def report_success(self, out, test, example, got):
- """
- Report that the given example ran successfully. (Only
- displays a message if verbose=True)
- """
- if self._verbose:
- out("ok\n")
-
- def report_failure(self, out, test, example, got):
- """
- Report that the given example failed.
- """
- out(self._failure_header(test, example) +
- self._checker.output_difference(example, got, self.optionflags))
-
- def report_unexpected_exception(self, out, test, example, exc_info):
- """
- Report that the given example raised an unexpected exception.
- """
- out(self._failure_header(test, example) +
- 'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
-
- def _failure_header(self, test, example):
- out = [self.DIVIDER]
- if test.filename:
- if test.lineno is not None and example.lineno is not None:
- lineno = test.lineno + example.lineno + 1
- else:
- lineno = '?'
- out.append('File "%s", line %s, in %s' %
- (test.filename, lineno, test.name))
- else:
- out.append('Line %s, in %s' % (example.lineno+1, test.name))
- out.append('Failed example:')
- source = example.source
- out.append(_indent(source))
- return '\n'.join(out)
-
- #/////////////////////////////////////////////////////////////////
- # DocTest Running
- #/////////////////////////////////////////////////////////////////
-
- def __run(self, test, compileflags, out):
- """
- Run the examples in `test`. Write the outcome of each example
- with one of the `DocTestRunner.report_*` methods, using the
- writer function `out`. `compileflags` is the set of compiler
- flags that should be used to execute examples. Return a tuple
- `(f, t)`, where `t` is the number of examples tried, and `f`
- is the number of examples that failed. The examples are run
- in the namespace `test.globs`.
- """
- # Keep track of the number of failures and tries.
- failures = tries = 0
-
- # Save the option flags (since option directives can be used
- # to modify them).
- original_optionflags = self.optionflags
-
- SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
-
- check = self._checker.check_output
-
- # Process each example.
- for examplenum, example in enumerate(test.examples):
-
-            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
- # reporting after the first failure.
- quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
- failures > 0)
-
- # Merge in the example's options.
- self.optionflags = original_optionflags
- if example.options:
- for (optionflag, val) in example.options.items():
- if val:
- self.optionflags |= optionflag
- else:
- self.optionflags &= ~optionflag
-
- # Record that we started this example.
- tries += 1
- if not quiet:
- self.report_start(out, test, example)
-
- # Use a special filename for compile(), so we can retrieve
- # the source code during interactive debugging (see
- # __patched_linecache_getlines).
- filename = '<doctest %s[%d]>' % (test.name, examplenum)
-
- # Run the example in the given context (globs), and record
- # any exception that gets raised. (But don't intercept
- # keyboard interrupts.)
- try:
- # Don't blink! This is where the user's code gets run.
- exec compile(example.source, filename, "single",
- compileflags, 1) in test.globs
- self.debugger.set_continue() # ==== Example Finished ====
- exception = None
- except KeyboardInterrupt:
- raise
- except:
- exception = sys.exc_info()
- self.debugger.set_continue() # ==== Example Finished ====
-
- got = self._fakeout.getvalue() # the actual output
- self._fakeout.truncate(0)
- outcome = FAILURE # guilty until proved innocent or insane
-
- # If the example executed without raising any exceptions,
- # verify its output.
- if exception is None:
- if check(example.want, got, self.optionflags):
- outcome = SUCCESS
-
- # The example raised an exception: check if it was expected.
- else:
- exc_info = sys.exc_info()
- exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
- if not quiet:
- got += _exception_traceback(exc_info)
-
- # If `example.exc_msg` is None, then we weren't expecting
- # an exception.
- if example.exc_msg is None:
- outcome = BOOM
-
- # We expected an exception: see whether it matches.
- elif check(example.exc_msg, exc_msg, self.optionflags):
- outcome = SUCCESS
-
- # Another chance if they didn't care about the detail.
- elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
- m1 = re.match(r'[^:]*:', example.exc_msg)
- m2 = re.match(r'[^:]*:', exc_msg)
- if m1 and m2 and check(m1.group(0), m2.group(0),
- self.optionflags):
- outcome = SUCCESS
-
- # Report the outcome.
- if outcome is SUCCESS:
- if not quiet:
- self.report_success(out, test, example, got)
- elif outcome is FAILURE:
- if not quiet:
- self.report_failure(out, test, example, got)
- failures += 1
- elif outcome is BOOM:
- if not quiet:
- self.report_unexpected_exception(out, test, example,
- exc_info)
- failures += 1
- else:
- assert False, ("unknown outcome", outcome)
-
- # Restore the option flags (in case they were modified)
- self.optionflags = original_optionflags
-
- # Record and return the number of failures and tries.
- self.__record_outcome(test, failures, tries)
- return failures, tries
-
- def __record_outcome(self, test, f, t):
- """
- Record the fact that the given DocTest (`test`) generated `f`
- failures out of `t` tried examples.
- """
- f2, t2 = self._name2ft.get(test.name, (0,0))
- self._name2ft[test.name] = (f+f2, t+t2)
- self.failures += f
- self.tries += t
-
- __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
- r'(?P<name>[\w\.]+)'
- r'\[(?P<examplenum>\d+)\]>$')
- def __patched_linecache_getlines(self, filename):
- m = self.__LINECACHE_FILENAME_RE.match(filename)
- if m and m.group('name') == self.test.name:
- example = self.test.examples[int(m.group('examplenum'))]
- return example.source.splitlines(True)
- else:
- return self.save_linecache_getlines(filename)
-
- def run(self, test, compileflags=None, out=None, clear_globs=True):
- """
- Run the examples in `test`, and display the results using the
- writer function `out`.
-
- The examples are run in the namespace `test.globs`. If
- `clear_globs` is true (the default), then this namespace will
- be cleared after the test runs, to help with garbage
- collection. If you would like to examine the namespace after
- the test completes, then use `clear_globs=False`.
-
- `compileflags` gives the set of flags that should be used by
- the Python compiler when running the examples. If not
- specified, then it will default to the set of future-import
- flags that apply to `globs`.
-
- The output of each example is checked using
- `DocTestRunner.check_output`, and the results are formatted by
- the `DocTestRunner.report_*` methods.
- """
- self.test = test
-
- if compileflags is None:
- compileflags = _extract_future_flags(test.globs)
-
- save_stdout = sys.stdout
- if out is None:
- out = save_stdout.write
- sys.stdout = self._fakeout
-
- # Patch pdb.set_trace to restore sys.stdout during interactive
- # debugging (so it's not still redirected to self._fakeout).
- # Note that the interactive output will go to *our*
- # save_stdout, even if that's not the real sys.stdout; this
- # allows us to write test cases for the set_trace behavior.
- save_set_trace = pdb.set_trace
- self.debugger = _OutputRedirectingPdb(save_stdout)
- self.debugger.reset()
- pdb.set_trace = self.debugger.set_trace
-
- # Patch linecache.getlines, so we can see the example's source
- # when we're inside the debugger.
- self.save_linecache_getlines = linecache.getlines
- linecache.getlines = self.__patched_linecache_getlines
-
- try:
- return self.__run(test, compileflags, out)
- finally:
- sys.stdout = save_stdout
- pdb.set_trace = save_set_trace
- linecache.getlines = self.save_linecache_getlines
- if clear_globs:
- test.globs.clear()
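
A sketch tying the parser and runner together through the stdlib doctest module, which shares this copy's get_doctest()/run()/summarize() interface; the docstring and test name are invented, and the second example fails on purpose to show the reporting.

import doctest

DOC = """
>>> sorted([3, 1, 2])
[1, 2, 3]
>>> 2 + 2
5
"""

parser = doctest.DocTestParser()
test = parser.get_doctest(DOC, globs={}, name='example', filename=None, lineno=0)
runner = doctest.DocTestRunner(verbose=False)
results = runner.run(test)          # prints a report for the failing 2 + 2
runner.summarize(verbose=True)
print('%d of %d examples failed' % (results.failed, results.attempted))  # 1 of 2
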
-
- #/////////////////////////////////////////////////////////////////
- # Summarization
- #/////////////////////////////////////////////////////////////////
- def summarize(self, verbose=None):
- """
- Print a summary of all the test cases that have been run by
- this DocTestRunner, and return a tuple `(f, t)`, where `f` is
- the total number of failed examples, and `t` is the total
- number of tried examples.
-
- The optional `verbose` argument controls how detailed the
- summary is. If the verbosity is not specified, then the
- DocTestRunner's verbosity is used.
- """
- if verbose is None:
- verbose = self._verbose
- notests = []
- passed = []
- failed = []
- totalt = totalf = 0
- for x in self._name2ft.items():
- name, (f, t) = x
- assert f <= t
- totalt += t
- totalf += f
- if t == 0:
- notests.append(name)
- elif f == 0:
- passed.append( (name, t) )
- else:
- failed.append(x)
- if verbose:
- if notests:
- print len(notests), "items had no tests:"
- notests.sort()
- for thing in notests:
- print " ", thing
- if passed:
- print len(passed), "items passed all tests:"
- passed.sort()
- for thing, count in passed:
- print " %3d tests in %s" % (count, thing)
- if failed:
- print self.DIVIDER
- print len(failed), "items had failures:"
- failed.sort()
- for thing, (f, t) in failed:
- print " %3d of %3d in %s" % (f, t, thing)
- if verbose:
- print totalt, "tests in", len(self._name2ft), "items."
- print totalt - totalf, "passed and", totalf, "failed."
- if totalf:
- print "***Test Failed***", totalf, "failures."
- elif verbose:
- print "Test passed."
- return totalf, totalt
-
- #/////////////////////////////////////////////////////////////////
- # Backward compatibility cruft to maintain doctest.master.
- #/////////////////////////////////////////////////////////////////
- def merge(self, other):
- d = self._name2ft
- for name, (f, t) in other._name2ft.items():
- if name in d:
- print "*** DocTestRunner.merge: '" + name + "' in both" \
- " testers; summing outcomes."
- f2, t2 = d[name]
- f = f + f2
- t = t + t2
- d[name] = f, t
-
-class OutputChecker:
- """
- A class used to check whether the actual output from a doctest
- example matches the expected output. `OutputChecker` defines two
- methods: `check_output`, which compares a given pair of outputs,
- and returns true if they match; and `output_difference`, which
- returns a string describing the differences between two outputs.
- """
- def check_output(self, want, got, optionflags):
- """
- Return True iff the actual output from an example (`got`)
- matches the expected output (`want`). These strings are
- always considered to match if they are identical; but
- depending on what option flags the test runner is using,
- several non-exact match types are also possible. See the
- documentation for `DocTestRunner` for more information about
- option flags.
- """
- # Handle the common case first, for efficiency:
- # if they're string-identical, always return true.
- if got == want:
- return True
-
- # The values True and False replaced 1 and 0 as the return
- # value for boolean comparisons in Python 2.3.
- if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
- if (got,want) == ("True\n", "1\n"):
- return True
- if (got,want) == ("False\n", "0\n"):
- return True
-
- # <BLANKLINE> can be used as a special sequence to signify a
- # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
- if not (optionflags & DONT_ACCEPT_BLANKLINE):
- # Replace <BLANKLINE> in want with a blank line.
- want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
- '', want)
- # If a line in got contains only spaces, then remove the
- # spaces.
- got = re.sub('(?m)^\s*?$', '', got)
- if got == want:
- return True
-
- # This flag causes doctest to ignore any differences in the
- # contents of whitespace strings. Note that this can be used
- # in conjunction with the ELLIPSIS flag.
- if optionflags & NORMALIZE_WHITESPACE:
- got = ' '.join(got.split())
- want = ' '.join(want.split())
- if got == want:
- return True
-
- # The ELLIPSIS flag says to let the sequence "..." in `want`
- # match any substring in `got`.
- if optionflags & ELLIPSIS:
- if _ellipsis_match(want, got):
- return True
-
- # We didn't find any match; return false.
- return False
-
- # Should we do a fancy diff?
- def _do_a_fancy_diff(self, want, got, optionflags):
- # Not unless they asked for a fancy diff.
- if not optionflags & (REPORT_UDIFF |
- REPORT_CDIFF |
- REPORT_NDIFF):
- return False
-
- # If expected output uses ellipsis, a meaningful fancy diff is
- # too hard ... or maybe not. In two real-life failures Tim saw,
- # a diff was a major help anyway, so this is commented out.
- # [todo] _ellipsis_match() knows which pieces do and don't match,
- # and could be the basis for a kick-ass diff in this case.
- ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
- ## return False
-
- # ndiff does intraline difference marking, so can be useful even
- # for 1-line differences.
- if optionflags & REPORT_NDIFF:
- return True
-
- # The other diff types need at least a few lines to be helpful.
- return want.count('\n') > 2 and got.count('\n') > 2
-
- def output_difference(self, example, got, optionflags):
- """
- Return a string describing the differences between the
- expected output for a given example (`example`) and the actual
- output (`got`). `optionflags` is the set of option flags used
- to compare `want` and `got`.
- """
- want = example.want
- # If <BLANKLINE>s are being used, then replace blank lines
- # with <BLANKLINE> in the actual output string.
- if not (optionflags & DONT_ACCEPT_BLANKLINE):
- got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
-
- # Check if we should use diff.
- if self._do_a_fancy_diff(want, got, optionflags):
- # Split want & got into lines.
- want_lines = want.splitlines(True) # True == keep line ends
- got_lines = got.splitlines(True)
- # Use difflib to find their differences.
- if optionflags & REPORT_UDIFF:
- diff = difflib.unified_diff(want_lines, got_lines, n=2)
- diff = list(diff)[2:] # strip the diff header
- kind = 'unified diff with -expected +actual'
- elif optionflags & REPORT_CDIFF:
- diff = difflib.context_diff(want_lines, got_lines, n=2)
- diff = list(diff)[2:] # strip the diff header
- kind = 'context diff with expected followed by actual'
- elif optionflags & REPORT_NDIFF:
- engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
- diff = list(engine.compare(want_lines, got_lines))
- kind = 'ndiff with -expected +actual'
- else:
- assert 0, 'Bad diff option'
- # Remove trailing whitespace on diff output.
- diff = [line.rstrip() + '\n' for line in diff]
- return 'Differences (%s):\n' % kind + _indent(''.join(diff))
-
- # If we're not using diff, then simply list the expected
- # output followed by the actual output.
- if want and got:
- return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
- elif want:
- return 'Expected:\n%sGot nothing\n' % _indent(want)
- elif got:
- return 'Expected nothing\nGot:\n%s' % _indent(got)
- else:
- return 'Expected nothing\nGot nothing\n'
-
-class DocTestFailure(Exception):
- """A DocTest example has failed in debugging mode.
-
- The exception instance has variables:
-
- - test: the DocTest object being run
-
- - example: the Example object that failed
-
- - got: the actual output
- """
- def __init__(self, test, example, got):
- self.test = test
- self.example = example
- self.got = got
-
- def __str__(self):
- return str(self.test)
-
-class UnexpectedException(Exception):
- """A DocTest example has encountered an unexpected exception
-
- The exception instance has variables:
-
- - test: the DocTest object being run
-
- - example: the Example object that failed
-
- - exc_info: the exception info
- """
- def __init__(self, test, example, exc_info):
- self.test = test
- self.example = example
- self.exc_info = exc_info
-
- def __str__(self):
- return str(self.test)
-
-class DebugRunner(DocTestRunner):
-
- def run(self, test, compileflags=None, out=None, clear_globs=True):
- r = DocTestRunner.run(self, test, compileflags, out, False)
- if clear_globs:
- test.globs.clear()
- return r
-
- def report_unexpected_exception(self, out, test, example, exc_info):
- raise UnexpectedException(test, example, exc_info)
-
- def report_failure(self, out, test, example, got):
- raise DocTestFailure(test, example, got)
-
-######################################################################
-## 6. Test Functions
-######################################################################
-# These should be backwards compatible.
-
-# For backward compatibility, a global instance of a DocTestRunner
-# class, updated by testmod.
-master = None
-
-def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
- report=True, optionflags=0, extraglobs=None,
- raise_on_error=False, exclude_empty=False):
- """m=None, name=None, globs=None, verbose=None, isprivate=None,
- report=True, optionflags=0, extraglobs=None, raise_on_error=False,
- exclude_empty=False
-
- Test examples in docstrings in functions and classes reachable
- from module m (or the current module if m is not supplied), starting
- with m.__doc__. Unless isprivate is specified, private names
- are not skipped.
-
- Also test examples reachable from dict m.__test__ if it exists and is
- not None. m.__test__ maps names to functions, classes and strings;
- function and class docstrings are tested even if the name is private;
- strings are tested directly, as if they were docstrings.
-
- Return (#failures, #tests).
-
- See doctest.__doc__ for an overview.
-
- Optional keyword arg "name" gives the name of the module; by default
- use m.__name__.
-
- Optional keyword arg "globs" gives a dict to be used as the globals
- when executing examples; by default, use m.__dict__. A copy of this
- dict is actually used for each docstring, so that each docstring's
- examples start with a clean slate.
-
- Optional keyword arg "extraglobs" gives a dictionary that should be
- merged into the globals that are used to execute examples. By
- default, no extra globals are used. This is new in 2.4.
-
- Optional keyword arg "verbose" prints lots of stuff if true, prints
- only failures if false; by default, it's true iff "-v" is in sys.argv.
-
- Optional keyword arg "report" prints a summary at the end when true,
- else prints nothing at the end. In verbose mode, the summary is
- detailed, else very brief (in fact, empty if all tests passed).
-
- Optional keyword arg "optionflags" or's together module constants,
- and defaults to 0. This is new in 2.3. Possible values (see the
- docs for details):
-
- DONT_ACCEPT_TRUE_FOR_1
- DONT_ACCEPT_BLANKLINE
- NORMALIZE_WHITESPACE
- ELLIPSIS
- IGNORE_EXCEPTION_DETAIL
- REPORT_UDIFF
- REPORT_CDIFF
- REPORT_NDIFF
- REPORT_ONLY_FIRST_FAILURE
-
- Optional keyword arg "raise_on_error" raises an exception on the
- first unexpected exception or failure. This allows failures to be
- post-mortem debugged.
-
- Deprecated in Python 2.4:
- Optional keyword arg "isprivate" specifies a function used to
- determine whether a name is private. The default function is to
- treat all functions as public. Optionally, "isprivate" can be
- set to doctest.is_private to skip over functions marked as private
- using the underscore naming convention; see its docs for details.
-
- Advanced tomfoolery: testmod runs methods of a local instance of
- class doctest.Tester, then merges the results into (or creates)
- global Tester instance doctest.master. Methods of doctest.master
- can be called directly too, if you want to do something unusual.
- Passing report=0 to testmod is especially useful then, to delay
- displaying a summary. Invoke doctest.master.summarize(verbose)
- when you're done fiddling.
- """
- global master
-
- if isprivate is not None:
- warnings.warn("the isprivate argument is deprecated; "
- "examine DocTestFinder.find() lists instead",
- DeprecationWarning)
-
- # If no module was given, then use __main__.
- if m is None:
- # DWA - m will still be None if this wasn't invoked from the command
- # line, in which case the following TypeError is about as good an error
- # as we should expect
- m = sys.modules.get('__main__')
-
- # Check that we were actually given a module.
- if not inspect.ismodule(m):
- raise TypeError("testmod: module required; %r" % (m,))
-
- # If no name was given, then use the module's name.
- if name is None:
- name = m.__name__
-
- # Find, parse, and run all tests in the given module.
- finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)
-
- if raise_on_error:
- runner = DebugRunner(verbose=verbose, optionflags=optionflags)
- else:
- runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
- for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
- runner.run(test)
-
- if report:
- runner.summarize()
-
- if master is None:
- master = runner
- else:
- master.merge(runner)
-
- return runner.failures, runner.tries
-
-def testfile(filename, module_relative=True, name=None, package=None,
- globs=None, verbose=None, report=True, optionflags=0,
- extraglobs=None, raise_on_error=False, parser=DocTestParser()):
- """
- Test examples in the given file. Return (#failures, #tests).
-
- Optional keyword arg "module_relative" specifies how filenames
- should be interpreted:
-
- - If "module_relative" is True (the default), then "filename"
- specifies a module-relative path. By default, this path is
- relative to the calling module's directory; but if the
- "package" argument is specified, then it is relative to that
- package. To ensure os-independence, "filename" should use
- "/" characters to separate path segments, and should not
- be an absolute path (i.e., it may not begin with "/").
-
- - If "module_relative" is False, then "filename" specifies an
- os-specific path. The path may be absolute or relative (to
- the current working directory).
-
- Optional keyword arg "name" gives the name of the test; by default
- use the file's basename.
-
- Optional keyword argument "package" is a Python package or the
- name of a Python package whose directory should be used as the
- base directory for a module relative filename. If no package is
- specified, then the calling module's directory is used as the base
- directory for module relative filenames. It is an error to
- specify "package" if "module_relative" is False.
-
- Optional keyword arg "globs" gives a dict to be used as the globals
- when executing examples; by default, use {}. A copy of this dict
- is actually used for each docstring, so that each docstring's
- examples start with a clean slate.
-
- Optional keyword arg "extraglobs" gives a dictionary that should be
- merged into the globals that are used to execute examples. By
- default, no extra globals are used.
-
- Optional keyword arg "verbose" prints lots of stuff if true, prints
- only failures if false; by default, it's true iff "-v" is in sys.argv.
-
- Optional keyword arg "report" prints a summary at the end when true,
- else prints nothing at the end. In verbose mode, the summary is
- detailed, else very brief (in fact, empty if all tests passed).
-
- Optional keyword arg "optionflags" or's together module constants,
- and defaults to 0. Possible values (see the docs for details):
-
- DONT_ACCEPT_TRUE_FOR_1
- DONT_ACCEPT_BLANKLINE
- NORMALIZE_WHITESPACE
- ELLIPSIS
- IGNORE_EXCEPTION_DETAIL
- REPORT_UDIFF
- REPORT_CDIFF
- REPORT_NDIFF
- REPORT_ONLY_FIRST_FAILURE
-
- Optional keyword arg "raise_on_error" raises an exception on the
- first unexpected exception or failure. This allows failures to be
- post-mortem debugged.
-
- Optional keyword arg "parser" specifies a DocTestParser (or
- subclass) that should be used to extract tests from the files.
-
- Advanced tomfoolery: testmod runs methods of a local instance of
- class doctest.Tester, then merges the results into (or creates)
- global Tester instance doctest.master. Methods of doctest.master
- can be called directly too, if you want to do something unusual.
- Passing report=0 to testmod is especially useful then, to delay
- displaying a summary. Invoke doctest.master.summarize(verbose)
- when you're done fiddling.
- """
- global master
-
- if package and not module_relative:
- raise ValueError("Package may only be specified for module-"
- "relative paths.")
-
- # Relativize the path
- if module_relative:
- package = _normalize_module(package)
- filename = _module_relative_path(package, filename)
-
- # If no name was given, then use the file's name.
- if name is None:
- name = os.path.basename(filename)
-
- # Assemble the globals.
- if globs is None:
- globs = {}
- else:
- globs = globs.copy()
- if extraglobs is not None:
- globs.update(extraglobs)
-
- if raise_on_error:
- runner = DebugRunner(verbose=verbose, optionflags=optionflags)
- else:
- runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
-
- # Read the file, convert it to a test, and run it.
- s = open(filename).read()
- test = parser.get_doctest(s, globs, name, filename, 0)
- runner.run(test)
-
- if report:
- runner.summarize()
-
- if master is None:
- master = runner
- else:
- master.merge(runner)
-
- return runner.failures, runner.tries
-
-def run_docstring_examples(f, globs, verbose=False, name="NoName",
- compileflags=None, optionflags=0):
- """
- Test examples in the given object's docstring (`f`), using `globs`
- as globals. Optional argument `name` is used in failure messages.
- If the optional argument `verbose` is true, then generate output
- even if there are no failures.
-
- `compileflags` gives the set of flags that should be used by the
- Python compiler when running the examples. If not specified, then
- it will default to the set of future-import flags that apply to
- `globs`.
-
- Optional keyword arg `optionflags` specifies options for the
- testing and output. See the documentation for `testmod` for more
- information.
- """
- # Find, parse, and run all tests in the given module.
- finder = DocTestFinder(verbose=verbose, recurse=False)
- runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
- for test in finder.find(f, name, globs=globs):
- runner.run(test, compileflags=compileflags)
-
-######################################################################
-## 7. Tester
-######################################################################
-# This is provided only for backwards compatibility. It's not
-# actually used in any way.
-
-class Tester:
- def __init__(self, mod=None, globs=None, verbose=None,
- isprivate=None, optionflags=0):
-
- warnings.warn("class Tester is deprecated; "
- "use class doctest.DocTestRunner instead",
- DeprecationWarning, stacklevel=2)
- if mod is None and globs is None:
- raise TypeError("Tester.__init__: must specify mod or globs")
- if mod is not None and not inspect.ismodule(mod):
- raise TypeError("Tester.__init__: mod must be a module; %r" %
- (mod,))
- if globs is None:
- globs = mod.__dict__
- self.globs = globs
-
- self.verbose = verbose
- self.isprivate = isprivate
- self.optionflags = optionflags
- self.testfinder = DocTestFinder(_namefilter=isprivate)
- self.testrunner = DocTestRunner(verbose=verbose,
- optionflags=optionflags)
-
- def runstring(self, s, name):
- test = DocTestParser().get_doctest(s, self.globs, name, None, None)
- if self.verbose:
- print "Running string", name
- (f,t) = self.testrunner.run(test)
- if self.verbose:
- print f, "of", t, "examples failed in string", name
- return (f,t)
-
- def rundoc(self, object, name=None, module=None):
- f = t = 0
- tests = self.testfinder.find(object, name, module=module,
- globs=self.globs)
- for test in tests:
- (f2, t2) = self.testrunner.run(test)
- (f,t) = (f+f2, t+t2)
- return (f,t)
-
- def rundict(self, d, name, module=None):
- import new
- m = new.module(name)
- m.__dict__.update(d)
- if module is None:
- module = False
- return self.rundoc(m, name, module)
-
- def run__test__(self, d, name):
- import new
- m = new.module(name)
- m.__test__ = d
- return self.rundoc(m, name)
-
- def summarize(self, verbose=None):
- return self.testrunner.summarize(verbose)
-
- def merge(self, other):
- self.testrunner.merge(other.testrunner)
-
-######################################################################
-## 8. Unittest Support
-######################################################################
-
-_unittest_reportflags = 0
-
-def set_unittest_reportflags(flags):
- global _unittest_reportflags
-
- if (flags & REPORTING_FLAGS) != flags:
- raise ValueError("Only reporting flags allowed", flags)
- old = _unittest_reportflags
- _unittest_reportflags = flags
- return old
-
-
-class DocTestCase(unittest.TestCase):
-
- def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
- checker=None):
-
- unittest.TestCase.__init__(self)
- self._dt_optionflags = optionflags
- self._dt_checker = checker
- self._dt_test = test
- self._dt_setUp = setUp
- self._dt_tearDown = tearDown
-
- def setUp(self):
- test = self._dt_test
-
- if self._dt_setUp is not None:
- self._dt_setUp(test)
-
- def tearDown(self):
- test = self._dt_test
-
- if self._dt_tearDown is not None:
- self._dt_tearDown(test)
-
- test.globs.clear()
-
- def runTest(self):
- test = self._dt_test
- old = sys.stdout
- new = StringIO()
- optionflags = self._dt_optionflags
-
- if not (optionflags & REPORTING_FLAGS):
- # The option flags don't include any reporting flags,
- # so add the default reporting flags
- optionflags |= _unittest_reportflags
-
- runner = DocTestRunner(optionflags=optionflags,
- checker=self._dt_checker, verbose=False)
-
- try:
- runner.DIVIDER = "-"*70
- failures, tries = runner.run(
- test, out=new.write, clear_globs=False)
- finally:
- sys.stdout = old
-
- if failures:
- raise self.failureException(self.format_failure(new.getvalue()))
-
- def format_failure(self, err):
- test = self._dt_test
- if test.lineno is None:
- lineno = 'unknown line number'
- else:
- lineno = '%s' % test.lineno
- lname = '.'.join(test.name.split('.')[-1:])
- return ('Failed doctest test for %s\n'
- ' File "%s", line %s, in %s\n\n%s'
- % (test.name, test.filename, lineno, lname, err)
- )
-
- def debug(self):
- self.setUp()
- runner = DebugRunner(optionflags=self._dt_optionflags,
- checker=self._dt_checker, verbose=False)
- runner.run(self._dt_test)
- self.tearDown()
-
- def id(self):
- return self._dt_test.name
-
- def __repr__(self):
- name = self._dt_test.name.split('.')
- return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
-
- __str__ = __repr__
-
- def shortDescription(self):
- return "Doctest: " + self._dt_test.name
-
-def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
- **options):
- """
- Convert doctest tests for a module to a unittest test suite.
-
- This converts each documentation string in a module that
- contains doctest tests to a unittest test case. If any of the
- tests in a doc string fail, then the test case fails. An exception
- is raised showing the name of the file containing the test and a
- (sometimes approximate) line number.
-
- The `module` argument provides the module to be tested. The argument
- can be either a module or a module name.
-
- If no argument is given, the calling module is used.
-
- A number of options may be provided as keyword arguments:
-
- setUp
- A set-up function. This is called before running the
- tests in each file. The setUp function will be passed a DocTest
- object. The setUp function can access the test globals as the
- globs attribute of the test passed.
-
- tearDown
- A tear-down function. This is called after running the
- tests in each file. The tearDown function will be passed a DocTest
- object. The tearDown function can access the test globals as the
- globs attribute of the test passed.
-
- globs
- A dictionary containing initial global variables for the tests.
-
- optionflags
- A set of doctest option flags expressed as an integer.
- """
-
- if test_finder is None:
- test_finder = DocTestFinder()
-
- module = _normalize_module(module)
- tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
- if globs is None:
- globs = module.__dict__
- if not tests:
- # Why do we want to do this? Because it reveals a bug that might
- # otherwise be hidden.
- raise ValueError(module, "has no tests")
-
- tests.sort()
- suite = unittest.TestSuite()
- for test in tests:
- if len(test.examples) == 0:
- continue
- if not test.filename:
- filename = module.__file__
- if filename[-4:] in (".pyc", ".pyo"):
- filename = filename[:-1]
- elif sys.platform.startswith('java') and \
- filename.endswith('$py.class'):
- filename = '%s.py' % filename[:-9]
- test.filename = filename
- suite.addTest(DocTestCase(test, **options))
-
- return suite
-
-class DocFileCase(DocTestCase):
-
- def id(self):
- return '_'.join(self._dt_test.name.split('.'))
-
- def __repr__(self):
- return self._dt_test.filename
- __str__ = __repr__
-
- def format_failure(self, err):
- return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'
- % (self._dt_test.name, self._dt_test.filename, err)
- )
-
-def DocFileTest(path, module_relative=True, package=None,
- globs=None, parser=DocTestParser(), **options):
- if globs is None:
- globs = {}
-
- if package and not module_relative:
- raise ValueError("Package may only be specified for module-"
- "relative paths.")
-
- # Relativize the path.
- if module_relative:
- package = _normalize_module(package)
- path = _module_relative_path(package, path)
-
- # Find the file and read it.
- name = os.path.basename(path)
- doc = open(path).read()
-
- # Convert it to a test, and wrap it in a DocFileCase.
- test = parser.get_doctest(doc, globs, name, path, 0)
- return DocFileCase(test, **options)
-
-def DocFileSuite(*paths, **kw):
- """A unittest suite for one or more doctest files.
-
- The path to each doctest file is given as a string; the
- interpretation of that string depends on the keyword argument
- "module_relative".
-
- A number of options may be provided as keyword arguments:
-
- module_relative
- If "module_relative" is True, then the given file paths are
- interpreted as os-independent module-relative paths. By
- default, these paths are relative to the calling module's
- directory; but if the "package" argument is specified, then
- they are relative to that package. To ensure os-independence,
- "filename" should use "/" characters to separate path
- segments, and may not be an absolute path (i.e., it may not
- begin with "/").
-
- If "module_relative" is False, then the given file paths are
- interpreted as os-specific paths. These paths may be absolute
- or relative (to the current working directory).
-
- package
- A Python package or the name of a Python package whose directory
- should be used as the base directory for module relative paths.
- If "package" is not specified, then the calling module's
- directory is used as the base directory for module relative
- filenames. It is an error to specify "package" if
- "module_relative" is False.
-
- setUp
- A set-up function. This is called before running the
- tests in each file. The setUp function will be passed a DocTest
- object. The setUp function can access the test globals as the
- globs attribute of the test passed.
-
- tearDown
- A tear-down function. This is called after running the
- tests in each file. The tearDown function will be passed a DocTest
- object. The tearDown function can access the test globals as the
- globs attribute of the test passed.
-
- globs
- A dictionary containing initial global variables for the tests.
-
- optionflags
- A set of doctest option flags expressed as an integer.
-
- parser
- A DocTestParser (or subclass) that should be used to extract
- tests from the files.
- """
- suite = unittest.TestSuite()
-
- # We do this here so that _normalize_module is called at the right
- # level. If it were called in DocFileTest, then this function
- # would be the caller and we might guess the package incorrectly.
- if kw.get('module_relative', True):
- kw['package'] = _normalize_module(kw.get('package'))
-
- for path in paths:
- suite.addTest(DocFileTest(path, **kw))
-
- return suite
-
-######################################################################
-## 9. Debugging Support
-######################################################################
-
-def script_from_examples(s):
- output = []
- for piece in DocTestParser().parse(s):
- if isinstance(piece, Example):
- # Add the example's source code (strip trailing NL)
- output.append(piece.source[:-1])
- # Add the expected output:
- want = piece.want
- if want:
- output.append('# Expected:')
- output += ['## '+l for l in want.split('\n')[:-1]]
- else:
- # Add non-example text.
- output += [_comment_line(l)
- for l in piece.split('\n')[:-1]]
-
- # Trim junk on both ends.
- while output and output[-1] == '#':
- output.pop()
- while output and output[0] == '#':
- output.pop(0)
- # Combine the output, and return it.
- # Add a courtesy newline to prevent exec from choking (see bug #1172785)
- return '\n'.join(output) + '\n'
-
-def testsource(module, name):
- """Extract the test sources from a doctest docstring as a script.
-
- Provide the module (or dotted name of the module) containing the
- test to be debugged and the name (within the module) of the object
- with the doc string with tests to be debugged.
- """
- module = _normalize_module(module)
- tests = DocTestFinder().find(module)
- test = [t for t in tests if t.name == name]
- if not test:
- raise ValueError(name, "not found in tests")
- test = test[0]
- testsrc = script_from_examples(test.docstring)
- return testsrc
-
-def debug_src(src, pm=False, globs=None):
- """Debug a single doctest docstring, in argument `src`'"""
- testsrc = script_from_examples(src)
- debug_script(testsrc, pm, globs)
-
-def debug_script(src, pm=False, globs=None):
- "Debug a test script. `src` is the script, as a string."
- import pdb
-
- # Note that tempfile.NamedTemporaryFile() cannot be used. As the
- # docs say, a file so created cannot be opened by name a second time
- # on modern Windows boxes, and execfile() needs to open it.
- srcfilename = tempfile.mktemp(".py", "doctestdebug")
- f = open(srcfilename, 'w')
- f.write(src)
- f.close()
-
- try:
- if globs:
- globs = globs.copy()
- else:
- globs = {}
-
- if pm:
- try:
- execfile(srcfilename, globs, globs)
- except:
- print sys.exc_info()[1]
- pdb.post_mortem(sys.exc_info()[2])
- else:
- # Note that %r is vital here. '%s' instead can, e.g., cause
- # backslashes to get treated as metacharacters on Windows.
- pdb.run("execfile(%r)" % srcfilename, globs, globs)
-
- finally:
- os.remove(srcfilename)
-
-def debug(module, name, pm=False):
- """Debug a single doctest docstring.
-
- Provide the module (or dotted name of the module) containing the
- test to be debugged and the name (within the module) of the object
- with the docstring with tests to be debugged.
- """
- module = _normalize_module(module)
- testsrc = testsource(module, name)
- debug_script(testsrc, pm, module.__dict__)
-
-
-__test__ = {}
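The file removed above is a vendored copy of the standard-library doctest module, so its entry points remain available from stdlib `doctest`. A minimal, purely illustrative sketch of the `testmod` API documented in the docstrings above (the `add` function and its doctests are hypothetical, not part of this diff):

import doctest

def add(a, b):
    """Return the sum of two values.

    >>> add(2, 3)
    5
    >>> add('spack', '-doc')
    'spack-doc'
    """
    return a + b

if __name__ == '__main__':
    # Returns (#failures, #tests); ELLIPSIS is one of the option flags listed above.
    failures, tries = doctest.testmod(optionflags=doctest.ELLIPSIS, verbose=False)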
diff --git a/lib/spack/external/nose/failure.py b/lib/spack/external/nose/failure.py
deleted file mode 100644
index c5fabfda5e..0000000000
--- a/lib/spack/external/nose/failure.py
+++ /dev/null
@@ -1,42 +0,0 @@
-import logging
-import unittest
-from traceback import format_tb
-from nose.pyversion import is_base_exception
-
-log = logging.getLogger(__name__)
-
-
-__all__ = ['Failure']
-
-
-class Failure(unittest.TestCase):
- """Unloadable or unexecutable test.
-
- A Failure case is placed in a test suite to indicate the presence of a
- test that could not be loaded or executed. A common example is a test
- module that fails to import.
-
- """
- __test__ = False # do not collect
- def __init__(self, exc_class, exc_val, tb=None, address=None):
- log.debug("A failure! %s %s %s", exc_class, exc_val, format_tb(tb))
- self.exc_class = exc_class
- self.exc_val = exc_val
- self.tb = tb
- self._address = address
- unittest.TestCase.__init__(self)
-
- def __str__(self):
- return "Failure: %s (%s)" % (
- getattr(self.exc_class, '__name__', self.exc_class), self.exc_val)
-
- def address(self):
- return self._address
-
- def runTest(self):
- if self.tb is not None:
- if is_base_exception(self.exc_val):
- raise self.exc_val, None, self.tb
- raise self.exc_class, self.exc_val, self.tb
- else:
- raise self.exc_class(self.exc_val)
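The removed `nose.failure.Failure` case exists so that collection problems (for example, a test module that fails to import) still surface as test results instead of aborting the run. A hedged sketch of that pattern using the constructor shown above; `broken_test_module` is a hypothetical name assumed not to import:

import sys
import unittest
from nose.failure import Failure

suite = unittest.TestSuite()
try:
    __import__('broken_test_module')  # hypothetical module expected to fail
except ImportError:
    exc_class, exc_val, tb = sys.exc_info()
    # The placeholder re-raises the original exception when the suite runs.
    suite.addTest(Failure(exc_class, exc_val, tb))
unittest.TextTestRunner().run(suite)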
diff --git a/lib/spack/external/nose/importer.py b/lib/spack/external/nose/importer.py
deleted file mode 100644
index e677658ce6..0000000000
--- a/lib/spack/external/nose/importer.py
+++ /dev/null
@@ -1,167 +0,0 @@
-"""Implements an importer that looks only in specific path (ignoring
-sys.path), and uses a per-path cache in addition to sys.modules. This is
-necessary because test modules in different directories frequently have the
-same names, which means that the first loaded would mask the rest when using
-the builtin importer.
-"""
-import logging
-import os
-import sys
-from nose.config import Config
-
-from imp import find_module, load_module, acquire_lock, release_lock
-
-log = logging.getLogger(__name__)
-
-try:
- _samefile = os.path.samefile
-except AttributeError:
- def _samefile(src, dst):
- return (os.path.normcase(os.path.realpath(src)) ==
- os.path.normcase(os.path.realpath(dst)))
-
-
-class Importer(object):
- """An importer class that does only path-specific imports. That
- is, the given module is not searched for on sys.path, but only at
- the path or in the directory specified.
- """
- def __init__(self, config=None):
- if config is None:
- config = Config()
- self.config = config
-
- def importFromPath(self, path, fqname):
- """Import a dotted-name package whose tail is at path. In other words,
- given foo.bar and path/to/foo/bar.py, import foo from path/to/foo then
- bar from path/to/foo/bar, returning bar.
- """
- # find the base dir of the package
- path_parts = os.path.normpath(os.path.abspath(path)).split(os.sep)
- name_parts = fqname.split('.')
- if path_parts[-1] == '__init__.py':
- path_parts.pop()
- path_parts = path_parts[:-(len(name_parts))]
- dir_path = os.sep.join(path_parts)
- # then import fqname starting from that dir
- return self.importFromDir(dir_path, fqname)
-
- def importFromDir(self, dir, fqname):
- """Import a module *only* from path, ignoring sys.path and
- reloading if the version in sys.modules is not the one we want.
- """
- dir = os.path.normpath(os.path.abspath(dir))
- log.debug("Import %s from %s", fqname, dir)
-
- # FIXME reimplement local per-dir cache?
-
- # special case for __main__
- if fqname == '__main__':
- return sys.modules[fqname]
-
- if self.config.addPaths:
- add_path(dir, self.config)
-
- path = [dir]
- parts = fqname.split('.')
- part_fqname = ''
- mod = parent = fh = None
-
- for part in parts:
- if part_fqname == '':
- part_fqname = part
- else:
- part_fqname = "%s.%s" % (part_fqname, part)
- try:
- acquire_lock()
- log.debug("find module part %s (%s) in %s",
- part, part_fqname, path)
- fh, filename, desc = find_module(part, path)
- old = sys.modules.get(part_fqname)
- if old is not None:
- # test modules frequently have name overlap; make sure
- # we get a fresh copy of anything we are trying to load
- # from a new path
- log.debug("sys.modules has %s as %s", part_fqname, old)
- if (self.sameModule(old, filename)
- or (self.config.firstPackageWins and
- getattr(old, '__path__', None))):
- mod = old
- else:
- del sys.modules[part_fqname]
- mod = load_module(part_fqname, fh, filename, desc)
- else:
- mod = load_module(part_fqname, fh, filename, desc)
- finally:
- if fh:
- fh.close()
- release_lock()
- if parent:
- setattr(parent, part, mod)
- if hasattr(mod, '__path__'):
- path = mod.__path__
- parent = mod
- return mod
-
- def _dirname_if_file(self, filename):
- # We only take the dirname if we have a path to a non-dir,
- # because taking the dirname of a symlink to a directory does not
- # give the actual directory parent.
- if os.path.isdir(filename):
- return filename
- else:
- return os.path.dirname(filename)
-
- def sameModule(self, mod, filename):
- mod_paths = []
- if hasattr(mod, '__path__'):
- for path in mod.__path__:
- mod_paths.append(self._dirname_if_file(path))
- elif hasattr(mod, '__file__'):
- mod_paths.append(self._dirname_if_file(mod.__file__))
- else:
- # builtin or other module-like object that
- # doesn't have __file__; must be new
- return False
- new_path = self._dirname_if_file(filename)
- for mod_path in mod_paths:
- log.debug(
- "module already loaded? mod: %s new: %s",
- mod_path, new_path)
- if _samefile(mod_path, new_path):
- return True
- return False
-
-
-def add_path(path, config=None):
- """Ensure that the path, or the root of the current package (if
- path is in a package), is in sys.path.
- """
-
- # FIXME add any src-looking dirs seen too... need to get config for that
-
- log.debug('Add path %s' % path)
- if not path:
- return []
- added = []
- parent = os.path.dirname(path)
- if (parent
- and os.path.exists(os.path.join(path, '__init__.py'))):
- added.extend(add_path(parent, config))
- elif not path in sys.path:
- log.debug("insert %s into sys.path", path)
- sys.path.insert(0, path)
- added.append(path)
- if config and config.srcDirs:
- for dirname in config.srcDirs:
- dirpath = os.path.join(path, dirname)
- if os.path.isdir(dirpath):
- sys.path.insert(0, dirpath)
- added.append(dirpath)
- return added
-
-
-def remove_path(path):
- log.debug('Remove path %s' % path)
- if path in sys.path:
- sys.path.remove(path)
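The removed `nose.importer.Importer` performs path-scoped imports so that same-named test modules in different directories do not shadow one another. An illustrative sketch of `importFromPath` as defined above (the paths and dotted names are made up):

from nose.importer import Importer

imp = Importer()
# Import tests/foo/test_bar.py as the dotted module foo.test_bar, resolving
# it from that directory only instead of searching sys.path.
test_bar = imp.importFromPath('tests/foo/test_bar.py', 'foo.test_bar')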
diff --git a/lib/spack/external/nose/inspector.py b/lib/spack/external/nose/inspector.py
deleted file mode 100644
index a6c4a3e3b6..0000000000
--- a/lib/spack/external/nose/inspector.py
+++ /dev/null
@@ -1,207 +0,0 @@
-"""Simple traceback introspection. Used to add additional information to
-AssertionErrors in tests, so that failure messages may be more informative.
-"""
-import inspect
-import logging
-import re
-import sys
-import textwrap
-import tokenize
-
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
-
-log = logging.getLogger(__name__)
-
-def inspect_traceback(tb):
- """Inspect a traceback and its frame, returning source for the expression
- where the exception was raised, with simple variable replacement performed
- and the line on which the exception was raised marked with '>>'
- """
- log.debug('inspect traceback %s', tb)
-
- # we only want the innermost frame, where the exception was raised
- while tb.tb_next:
- tb = tb.tb_next
-
- frame = tb.tb_frame
- lines, exc_line = tbsource(tb)
-
- # figure out the set of lines to grab.
- inspect_lines, mark_line = find_inspectable_lines(lines, exc_line)
- src = StringIO(textwrap.dedent(''.join(inspect_lines)))
- exp = Expander(frame.f_locals, frame.f_globals)
-
- while inspect_lines:
- try:
- for tok in tokenize.generate_tokens(src.readline):
- exp(*tok)
- except tokenize.TokenError, e:
- # this can happen if our inspectable region happens to butt up
- # against the end of a construct like a docstring with the closing
- # """ on separate line
- log.debug("Tokenizer error: %s", e)
- inspect_lines.pop(0)
- mark_line -= 1
- src = StringIO(textwrap.dedent(''.join(inspect_lines)))
- exp = Expander(frame.f_locals, frame.f_globals)
- continue
- break
- padded = []
- if exp.expanded_source:
- exp_lines = exp.expanded_source.split('\n')
- ep = 0
- for line in exp_lines:
- if ep == mark_line:
- padded.append('>> ' + line)
- else:
- padded.append(' ' + line)
- ep += 1
- return '\n'.join(padded)
-
-
-def tbsource(tb, context=6):
- """Get source from a traceback object.
-
- A tuple of two things is returned: a list of lines of context from
- the source code, and the index of the current line within that list.
- The optional second argument specifies the number of lines of context
- to return, which are centered around the current line.
-
- .. Note ::
- This is adapted from inspect.py in the python 2.4 standard library,
- since a bug in the 2.3 version of inspect prevents it from correctly
- locating source lines in a traceback frame.
- """
-
- lineno = tb.tb_lineno
- frame = tb.tb_frame
-
- if context > 0:
- start = lineno - 1 - context//2
- log.debug("lineno: %s start: %s", lineno, start)
-
- try:
- lines, dummy = inspect.findsource(frame)
- except IOError:
- lines, index = [''], 0
- else:
- all_lines = lines
- start = max(start, 1)
- start = max(0, min(start, len(lines) - context))
- lines = lines[start:start+context]
- index = lineno - 1 - start
-
- # python 2.5 compat: if previous line ends in a continuation,
- # decrement start by 1 to match 2.4 behavior
- if sys.version_info >= (2, 5) and index > 0:
- while lines[index-1].strip().endswith('\\'):
- start -= 1
- lines = all_lines[start:start+context]
- else:
- lines, index = [''], 0
- log.debug("tbsource lines '''%s''' around index %s", lines, index)
- return (lines, index)
-
-
-def find_inspectable_lines(lines, pos):
- """Find lines in home that are inspectable.
-
- Walk back from the err line up to 3 lines, but don't walk back over
- changes in indent level.
-
- Walk forward up to 3 lines, counting \ separated lines as 1. Don't walk
- over changes in indent level (unless part of an extended line)
- """
- cnt = re.compile(r'\\[\s\n]*$')
- df = re.compile(r':[\s\n]*$')
- ind = re.compile(r'^(\s*)')
- toinspect = []
- home = lines[pos]
- home_indent = ind.match(home).groups()[0]
-
- before = lines[max(pos-3, 0):pos]
- before.reverse()
- after = lines[pos+1:min(pos+4, len(lines))]
-
- for line in before:
- if ind.match(line).groups()[0] == home_indent:
- toinspect.append(line)
- else:
- break
- toinspect.reverse()
- toinspect.append(home)
- home_pos = len(toinspect)-1
- continued = cnt.search(home)
- for line in after:
- if ((continued or ind.match(line).groups()[0] == home_indent)
- and not df.search(line)):
- toinspect.append(line)
- continued = cnt.search(line)
- else:
- break
- log.debug("Inspecting lines '''%s''' around %s", toinspect, home_pos)
- return toinspect, home_pos
-
-
-class Expander:
- """Simple expression expander. Uses tokenize to find the names and
- expands any that can be looked up in the frame.
- """
- def __init__(self, locals, globals):
- self.locals = locals
- self.globals = globals
- self.lpos = None
- self.expanded_source = ''
-
- def __call__(self, ttype, tok, start, end, line):
- # TODO
- # deal with unicode properly
-
- # TODO
- # Dealing with instance members
- # always keep the last thing seen
- # if the current token is a dot,
- # get ready to getattr(lastthing, this thing) on the
- # next call.
-
- if self.lpos is not None:
- if start[1] >= self.lpos:
- self.expanded_source += ' ' * (start[1]-self.lpos)
- elif start[1] < self.lpos:
- # newline, indent correctly
- self.expanded_source += ' ' * start[1]
- self.lpos = end[1]
-
- if ttype == tokenize.INDENT:
- pass
- elif ttype == tokenize.NAME:
- # Clean this junk up
- try:
- val = self.locals[tok]
- if callable(val):
- val = tok
- else:
- val = repr(val)
- except KeyError:
- try:
- val = self.globals[tok]
- if callable(val):
- val = tok
- else:
- val = repr(val)
-
- except KeyError:
- val = tok
- # FIXME... not sure how to handle things like funcs, classes
- # FIXME this is broken for some unicode strings
- self.expanded_source += val
- else:
- self.expanded_source += tok
- # if this is the end of the line and the line ends with
- # \, then tack a \ and newline onto the output
- # print line[end[1]:]
- if re.match(r'\s+\\\n', line[end[1]:]):
- self.expanded_source += ' \\\n'
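The removed `nose.inspector.inspect_traceback` helper rewrites the source of a failing assertion with local values substituted and the offending line marked with '>>'. A small hypothetical usage sketch (Python 2, like the deleted code):

import sys
from nose.inspector import inspect_traceback

def check_sum():
    expected = 4
    got = 2 + 3
    assert expected == got

try:
    check_sum()
except AssertionError:
    # Show the failing expression with expanded local values.
    print(inspect_traceback(sys.exc_info()[2]))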
diff --git a/lib/spack/external/nose/loader.py b/lib/spack/external/nose/loader.py
deleted file mode 100644
index 3744e54ff6..0000000000
--- a/lib/spack/external/nose/loader.py
+++ /dev/null
@@ -1,623 +0,0 @@
-"""
-Test Loader
------------
-
-nose's test loader implements the same basic functionality as its
-superclass, unittest.TestLoader, but extends it by more liberal
-interpretations of what may be a test and how a test may be named.
-"""
-from __future__ import generators
-
-import logging
-import os
-import sys
-import unittest
-import types
-from inspect import isfunction
-from nose.pyversion import unbound_method, ismethod
-from nose.case import FunctionTestCase, MethodTestCase
-from nose.failure import Failure
-from nose.config import Config
-from nose.importer import Importer, add_path, remove_path
-from nose.selector import defaultSelector, TestAddress
-from nose.util import func_lineno, getpackage, isclass, isgenerator, \
- ispackage, regex_last_key, resolve_name, transplant_func, \
- transplant_class, test_address
-from nose.suite import ContextSuiteFactory, ContextList, LazySuite
-from nose.pyversion import sort_list, cmp_to_key
-
-
-log = logging.getLogger(__name__)
-#log.setLevel(logging.DEBUG)
-
-# for efficiency and easier mocking
-op_normpath = os.path.normpath
-op_abspath = os.path.abspath
-op_join = os.path.join
-op_isdir = os.path.isdir
-op_isfile = os.path.isfile
-
-
-__all__ = ['TestLoader', 'defaultTestLoader']
-
-
-class TestLoader(unittest.TestLoader):
- """Test loader that extends unittest.TestLoader to:
-
- * Load tests from test-like functions and classes that are not
- unittest.TestCase subclasses
- * Find and load test modules in a directory
- * Support tests that are generators
- * Support easy extensions of or changes to that behavior through plugins
- """
- config = None
- importer = None
- workingDir = None
- selector = None
- suiteClass = None
-
- def __init__(self, config=None, importer=None, workingDir=None,
- selector=None):
- """Initialize a test loader.
-
- Parameters (all optional):
-
- * config: provide a `nose.config.Config`_ or other config class
- instance; if not provided a `nose.config.Config`_ with
- default values is used.
- * importer: provide an importer instance that implements
- `importFromPath`. If not provided, a
- `nose.importer.Importer`_ is used.
- * workingDir: the directory to which file and module names are
- relative. If not provided, assumed to be the current working
- directory.
- * selector: a selector class or instance. If a class is
- provided, it will be instantiated with one argument, the
- current config. If not provided, a `nose.selector.Selector`_
- is used.
- """
- if config is None:
- config = Config()
- if importer is None:
- importer = Importer(config=config)
- if workingDir is None:
- workingDir = config.workingDir
- if selector is None:
- selector = defaultSelector(config)
- elif isclass(selector):
- selector = selector(config)
- self.config = config
- self.importer = importer
- self.workingDir = op_normpath(op_abspath(workingDir))
- self.selector = selector
- if config.addPaths:
- add_path(workingDir, config)
- self.suiteClass = ContextSuiteFactory(config=config)
-
- self._visitedPaths = set([])
-
- unittest.TestLoader.__init__(self)
-
- def getTestCaseNames(self, testCaseClass):
- """Override to select with selector, unless
- config.getTestCaseNamesCompat is True
- """
- if self.config.getTestCaseNamesCompat:
- return unittest.TestLoader.getTestCaseNames(self, testCaseClass)
-
- def wanted(attr, cls=testCaseClass, sel=self.selector):
- item = getattr(cls, attr, None)
- if isfunction(item):
- item = unbound_method(cls, item)
- elif not ismethod(item):
- return False
- return sel.wantMethod(item)
-
- cases = filter(wanted, dir(testCaseClass))
-
- # add runTest if nothing else picked
- if not cases and hasattr(testCaseClass, 'runTest'):
- cases = ['runTest']
- if self.sortTestMethodsUsing:
- sort_list(cases, cmp_to_key(self.sortTestMethodsUsing))
- return cases
-
- def _haveVisited(self, path):
- # For cases where path is None, we always pretend we haven't visited
- # them.
- if path is None:
- return False
-
- return path in self._visitedPaths
-
- def _addVisitedPath(self, path):
- if path is not None:
- self._visitedPaths.add(path)
-
- def loadTestsFromDir(self, path):
- """Load tests from the directory at path. This is a generator
- -- each suite of tests from a module or other file is yielded
- and is expected to be executed before the next file is
- examined.
- """
- log.debug("load from dir %s", path)
- plugins = self.config.plugins
- plugins.beforeDirectory(path)
- if self.config.addPaths:
- paths_added = add_path(path, self.config)
-
- entries = os.listdir(path)
- sort_list(entries, regex_last_key(self.config.testMatch))
- for entry in entries:
- # this hard-coded initial-dot test will be removed:
- # http://code.google.com/p/python-nose/issues/detail?id=82
- if entry.startswith('.'):
- continue
- entry_path = op_abspath(op_join(path, entry))
- is_file = op_isfile(entry_path)
- wanted = False
- if is_file:
- is_dir = False
- wanted = self.selector.wantFile(entry_path)
- else:
- is_dir = op_isdir(entry_path)
- if is_dir:
- # this hard-coded initial-underscore test will be removed:
- # http://code.google.com/p/python-nose/issues/detail?id=82
- if entry.startswith('_'):
- continue
- wanted = self.selector.wantDirectory(entry_path)
- is_package = ispackage(entry_path)
-
- # Python 3.3 now implements PEP 420: Implicit Namespace Packages.
- # As a result, it's now possible that parent paths that have a
- # segment with the same basename as our package ends up
- # in module.__path__. So we have to keep track of what we've
- # visited, and not-revisit them again.
- if wanted and not self._haveVisited(entry_path):
- self._addVisitedPath(entry_path)
- if is_file:
- plugins.beforeContext()
- if entry.endswith('.py'):
- yield self.loadTestsFromName(
- entry_path, discovered=True)
- else:
- yield self.loadTestsFromFile(entry_path)
- plugins.afterContext()
- elif is_package:
- # Load the entry as a package: given the full path,
- # loadTestsFromName() will figure it out
- yield self.loadTestsFromName(
- entry_path, discovered=True)
- else:
- # Another test dir in this one: recurse lazily
- yield self.suiteClass(
- lambda: self.loadTestsFromDir(entry_path))
- tests = []
- for test in plugins.loadTestsFromDir(path):
- tests.append(test)
- # TODO: is this try/except needed?
- try:
- if tests:
- yield self.suiteClass(tests)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- yield self.suiteClass([Failure(*sys.exc_info())])
-
- # pop paths
- if self.config.addPaths:
- for p in paths_added:
- remove_path(p)
- plugins.afterDirectory(path)
-
- def loadTestsFromFile(self, filename):
- """Load tests from a non-module file. Default is to raise a
- ValueError; plugins may implement `loadTestsFromFile` to
- provide a list of tests loaded from the file.
- """
- log.debug("Load from non-module file %s", filename)
- try:
- tests = [test for test in
- self.config.plugins.loadTestsFromFile(filename)]
- if tests:
- # Plugins can yield False to indicate that they were
- # unable to load tests from a file, but it was not an
- # error -- the file just had no tests to load.
- tests = filter(None, tests)
- return self.suiteClass(tests)
- else:
- # Nothing was able to even try to load from this file
- open(filename, 'r').close() # trigger os error
- raise ValueError("Unable to load tests from file %s"
- % filename)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- exc = sys.exc_info()
- return self.suiteClass(
- [Failure(exc[0], exc[1], exc[2],
- address=(filename, None, None))])
-
- def loadTestsFromGenerator(self, generator, module):
- """Lazy-load tests from a generator function. The generator function
- may yield either:
-
- * a callable, or
- * a function name resolvable within the same module
- """
- def generate(g=generator, m=module):
- try:
- for test in g():
- test_func, arg = self.parseGeneratedTest(test)
- if not callable(test_func):
- test_func = getattr(m, test_func)
- yield FunctionTestCase(test_func, arg=arg, descriptor=g)
- except KeyboardInterrupt:
- raise
- except:
- exc = sys.exc_info()
- yield Failure(exc[0], exc[1], exc[2],
- address=test_address(generator))
- return self.suiteClass(generate, context=generator, can_split=False)
-
- def loadTestsFromGeneratorMethod(self, generator, cls):
- """Lazy-load tests from a generator method.
-
- This is more complicated than loading from a generator function,
- since a generator method may yield:
-
- * a function
- * a bound or unbound method, or
- * a method name
- """
- # convert the unbound generator method
- # into a bound method so it can be called below
- if hasattr(generator, 'im_class'):
- cls = generator.im_class
- inst = cls()
- method = generator.__name__
- generator = getattr(inst, method)
-
- def generate(g=generator, c=cls):
- try:
- for test in g():
- test_func, arg = self.parseGeneratedTest(test)
- if not callable(test_func):
- test_func = unbound_method(c, getattr(c, test_func))
- if ismethod(test_func):
- yield MethodTestCase(test_func, arg=arg, descriptor=g)
- elif callable(test_func):
- # In this case we're forcing the 'MethodTestCase'
- # to run the inline function as its test call,
- # but using the generator method as the 'method of
- # record' (so no need to pass it as the descriptor)
- yield MethodTestCase(g, test=test_func, arg=arg)
- else:
- yield Failure(
- TypeError,
- "%s is not a callable or method" % test_func)
- except KeyboardInterrupt:
- raise
- except:
- exc = sys.exc_info()
- yield Failure(exc[0], exc[1], exc[2],
- address=test_address(generator))
- return self.suiteClass(generate, context=generator, can_split=False)
-
- def loadTestsFromModule(self, module, path=None, discovered=False):
- """Load all tests from module and return a suite containing
- them. If the module has been discovered and is not test-like,
- the suite will be empty by default, though plugins may add
- their own tests.
- """
- log.debug("Load from module %s", module)
- tests = []
- test_classes = []
- test_funcs = []
- # For *discovered* modules, we only load tests when the module looks
- # testlike. For modules we've been directed to load, we always
- # look for tests. (discovered is set to True by loadTestsFromDir)
- if not discovered or self.selector.wantModule(module):
- for item in dir(module):
- test = getattr(module, item, None)
- # print "Check %s (%s) in %s" % (item, test, module.__name__)
- if isclass(test):
- if self.selector.wantClass(test):
- test_classes.append(test)
- elif isfunction(test) and self.selector.wantFunction(test):
- test_funcs.append(test)
- sort_list(test_classes, lambda x: x.__name__)
- sort_list(test_funcs, func_lineno)
- tests = map(lambda t: self.makeTest(t, parent=module),
- test_classes + test_funcs)
-
- # Now, descend into packages
- # FIXME can or should this be lazy?
- # is this syntax 2.2 compatible?
- module_paths = getattr(module, '__path__', [])
-
- if path:
- path = os.path.normcase(os.path.realpath(path))
-
- for module_path in module_paths:
- log.debug("Load tests from module path %s?", module_path)
- log.debug("path: %s os.path.realpath(%s): %s",
- path, os.path.normcase(module_path),
- os.path.realpath(os.path.normcase(module_path)))
- if (self.config.traverseNamespace or not path) or \
- os.path.realpath(
- os.path.normcase(module_path)).startswith(path):
- # Egg files can be on sys.path, so make sure the path is a
- # directory before trying to load from it.
- if os.path.isdir(module_path):
- tests.extend(self.loadTestsFromDir(module_path))
-
- for test in self.config.plugins.loadTestsFromModule(module, path):
- tests.append(test)
-
- return self.suiteClass(ContextList(tests, context=module))
-
- def loadTestsFromName(self, name, module=None, discovered=False):
- """Load tests from the entity with the given name.
-
- The name may indicate a file, directory, module, or any object
- within a module. See `nose.util.split_test_name` for details on
- test name parsing.
- """
- # FIXME refactor this method into little bites?
- log.debug("load from %s (%s)", name, module)
-
- suite = self.suiteClass
-
- # give plugins first crack
- plug_tests = self.config.plugins.loadTestsFromName(name, module)
- if plug_tests:
- return suite(plug_tests)
-
- addr = TestAddress(name, workingDir=self.workingDir)
- if module:
- # Two cases:
- # name is class.foo
- # The addr will be incorrect, since it thinks class.foo is
- # a dotted module name. It's actually a dotted attribute
- # name. In this case we want to use the full submitted
- # name as the name to load from the module.
- # name is module:class.foo
- # The addr will be correct. The part we want is the part after
- # the :, which is in addr.call.
- if addr.call:
- name = addr.call
- parent, obj = self.resolve(name, module)
- if (isclass(parent)
- and getattr(parent, '__module__', None) != module.__name__
- and not isinstance(obj, Failure)):
- parent = transplant_class(parent, module.__name__)
- obj = getattr(parent, obj.__name__)
- log.debug("parent %s obj %s module %s", parent, obj, module)
- if isinstance(obj, Failure):
- return suite([obj])
- else:
- return suite(ContextList([self.makeTest(obj, parent)],
- context=parent))
- else:
- if addr.module:
- try:
- if addr.filename is None:
- module = resolve_name(addr.module)
- else:
- self.config.plugins.beforeImport(
- addr.filename, addr.module)
- # FIXME: to support module.name names,
- # do what resolve-name does and keep trying to
- # import, popping tail of module into addr.call,
- # until we either get an import or run out of
- # module parts
- try:
- module = self.importer.importFromPath(
- addr.filename, addr.module)
- finally:
- self.config.plugins.afterImport(
- addr.filename, addr.module)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- exc = sys.exc_info()
- return suite([Failure(exc[0], exc[1], exc[2],
- address=addr.totuple())])
- if addr.call:
- return self.loadTestsFromName(addr.call, module)
- else:
- return self.loadTestsFromModule(
- module, addr.filename,
- discovered=discovered)
- elif addr.filename:
- path = addr.filename
- if addr.call:
- package = getpackage(path)
- if package is None:
- return suite([
- Failure(ValueError,
- "Can't find callable %s in file %s: "
- "file is not a python module" %
- (addr.call, path),
- address=addr.totuple())])
- return self.loadTestsFromName(addr.call, module=package)
- else:
- if op_isdir(path):
- # In this case we *can* be lazy since we know
- # that each module in the dir will be fully
- # loaded before its tests are executed; we
- # also know that we're not going to be asked
- # to load from . and ./some_module.py *as part
- # of this named test load*
- return LazySuite(
- lambda: self.loadTestsFromDir(path))
- elif op_isfile(path):
- return self.loadTestsFromFile(path)
- else:
- return suite([
- Failure(OSError, "No such file %s" % path,
- address=addr.totuple())])
- else:
- # just a function? what to do? I think it can only be
- # handled when module is not None
- return suite([
- Failure(ValueError, "Unresolvable test name %s" % name,
- address=addr.totuple())])
-
- def loadTestsFromNames(self, names, module=None):
- """Load tests from all names, returning a suite containing all
- tests.
- """
- plug_res = self.config.plugins.loadTestsFromNames(names, module)
- if plug_res:
- suite, names = plug_res
- if suite:
- return self.suiteClass([
- self.suiteClass(suite),
- unittest.TestLoader.loadTestsFromNames(self, names, module)
- ])
- return unittest.TestLoader.loadTestsFromNames(self, names, module)
-
- def loadTestsFromTestCase(self, testCaseClass):
- """Load tests from a unittest.TestCase subclass.
- """
- cases = []
- plugins = self.config.plugins
- for case in plugins.loadTestsFromTestCase(testCaseClass):
- cases.append(case)
- # For efficiency in the most common case, just call and return from
- # super. This avoids having to extract cases and rebuild a context
- # suite when there are no plugin-contributed cases.
- if not cases:
- return super(TestLoader, self).loadTestsFromTestCase(testCaseClass)
- cases.extend(
- [case for case in
- super(TestLoader, self).loadTestsFromTestCase(testCaseClass)])
- return self.suiteClass(cases)
-
- def loadTestsFromTestClass(self, cls):
- """Load tests from a test class that is *not* a unittest.TestCase
- subclass.
-
- In this case, we can't depend on the class's `__init__` taking method
- name arguments, so we have to compose a MethodTestCase for each
- method in the class that looks testlike.
- """
- def wanted(attr, cls=cls, sel=self.selector):
- item = getattr(cls, attr, None)
- if isfunction(item):
- item = unbound_method(cls, item)
- elif not ismethod(item):
- return False
- return sel.wantMethod(item)
- cases = [self.makeTest(getattr(cls, case), cls)
- for case in filter(wanted, dir(cls))]
- for test in self.config.plugins.loadTestsFromTestClass(cls):
- cases.append(test)
- return self.suiteClass(ContextList(cases, context=cls))
-
- def makeTest(self, obj, parent=None):
- try:
- return self._makeTest(obj, parent)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- exc = sys.exc_info()
- try:
- addr = test_address(obj)
- except KeyboardInterrupt:
- raise
- except:
- addr = None
- return Failure(exc[0], exc[1], exc[2], address=addr)
-
- def _makeTest(self, obj, parent=None):
- """Given a test object and its parent, return a test case
- or test suite.
- """
- plug_tests = []
- try:
- addr = test_address(obj)
- except KeyboardInterrupt:
- raise
- except:
- addr = None
- for test in self.config.plugins.makeTest(obj, parent):
- plug_tests.append(test)
- # TODO: is this try/except needed?
- try:
- if plug_tests:
- return self.suiteClass(plug_tests)
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- exc = sys.exc_info()
- return Failure(exc[0], exc[1], exc[2], address=addr)
-
- if isfunction(obj) and parent and not isinstance(parent, types.ModuleType):
- # This is a Python 3.x 'unbound method'. Wrap it with its
- # associated class..
- obj = unbound_method(parent, obj)
-
- if isinstance(obj, unittest.TestCase):
- return obj
- elif isclass(obj):
- if parent and obj.__module__ != parent.__name__:
- obj = transplant_class(obj, parent.__name__)
- if issubclass(obj, unittest.TestCase):
- return self.loadTestsFromTestCase(obj)
- else:
- return self.loadTestsFromTestClass(obj)
- elif ismethod(obj):
- if parent is None:
- parent = obj.__class__
- if issubclass(parent, unittest.TestCase):
- return parent(obj.__name__)
- else:
- if isgenerator(obj):
- return self.loadTestsFromGeneratorMethod(obj, parent)
- else:
- return MethodTestCase(obj)
- elif isfunction(obj):
- if parent and obj.__module__ != parent.__name__:
- obj = transplant_func(obj, parent.__name__)
- if isgenerator(obj):
- return self.loadTestsFromGenerator(obj, parent)
- else:
- return FunctionTestCase(obj)
- else:
- return Failure(TypeError,
- "Can't make a test from %s" % obj,
- address=addr)
-
- def resolve(self, name, module):
- """Resolve name within module
- """
- obj = module
- parts = name.split('.')
- for part in parts:
- parent, obj = obj, getattr(obj, part, None)
- if obj is None:
- # no such test
- obj = Failure(ValueError, "No such test %s" % name)
- return parent, obj
-
- def parseGeneratedTest(self, test):
- """Given the yield value of a test generator, return a func and args.
-
- This is used in the two loadTestsFromGenerator* methods.
-
- """
- if not isinstance(test, tuple): # yield test
- test_func, arg = (test, tuple())
- elif len(test) == 1: # yield (test,)
- test_func, arg = (test[0], tuple())
- else: # yield test, foo, bar, ...
- assert len(test) > 1 # sanity check
- test_func, arg = (test[0], test[1:])
- return test_func, arg
-
-defaultTestLoader = TestLoader
-
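For context on the removed loader: ``parseGeneratedTest`` above normalizes
whatever a nose test generator yields into a ``(test_func, args)`` pair, and
the ``loadTestsFromGenerator*`` methods it mentions build one test case per
yielded item. A minimal sketch of the yield shape it handles, using
hypothetical test names that are not part of the deleted file::

    def check_even(n):
        assert n % 2 == 0

    def test_even_numbers():
        for n in (2, 4, 6):
            # each yield is (callable, arg); parseGeneratedTest()
            # unpacks it into (check_even, (n,))
            yield check_even, n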
diff --git a/lib/spack/external/nose/plugins/__init__.py b/lib/spack/external/nose/plugins/__init__.py
deleted file mode 100644
index 08ee8f3230..0000000000
--- a/lib/spack/external/nose/plugins/__init__.py
+++ /dev/null
@@ -1,190 +0,0 @@
-"""
-Writing Plugins
----------------
-
-nose supports plugins for test collection, selection, observation and
-reporting. There are two basic rules for plugins:
-
-* Plugin classes should subclass :class:`nose.plugins.Plugin`.
-
-* Plugins may implement any of the methods described in the class
- :doc:`IPluginInterface <interface>` in nose.plugins.base. Please note that
- this class is for documentary purposes only; plugins may not subclass
- IPluginInterface.
-
-Hello World
-===========
-
-Here's a basic plugin. It doesn't do much so read on for more ideas or dive
-into the :doc:`IPluginInterface <interface>` to see all available hooks.
-
-.. code-block:: python
-
- import logging
- import os
-
- from nose.plugins import Plugin
-
- log = logging.getLogger('nose.plugins.helloworld')
-
- class HelloWorld(Plugin):
- name = 'helloworld'
-
- def options(self, parser, env=os.environ):
- super(HelloWorld, self).options(parser, env=env)
-
- def configure(self, options, conf):
- super(HelloWorld, self).configure(options, conf)
- if not self.enabled:
- return
-
- def finalize(self, result):
- log.info('Hello pluginized world!')
-
-Registering
-===========
-
-.. Note::
- Important note: the following applies only to the default
- plugin manager. Other plugin managers may use different means to
- locate and load plugins.
-
-For nose to find a plugin, it must be part of a package that uses
-setuptools_, and the plugin must be included in the entry points defined
-in the setup.py for the package:
-
-.. code-block:: python
-
- setup(name='Some plugin',
- # ...
- entry_points = {
- 'nose.plugins.0.10': [
- 'someplugin = someplugin:SomePlugin'
- ]
- },
- # ...
- )
-
-Once the package is installed with install or develop, nose will be able
-to load the plugin.
-
-.. _setuptools: http://peak.telecommunity.com/DevCenter/setuptools
-
-Registering a plugin without setuptools
-=======================================
-
-It is currently possible to register a plugin programmatically by
-creating a custom nose runner like this:
-
-.. code-block:: python
-
- import nose
- from yourplugin import YourPlugin
-
- if __name__ == '__main__':
- nose.main(addplugins=[YourPlugin()])
-
-Defining options
-================
-
-All plugins must implement the methods ``options(self, parser, env)``
-and ``configure(self, options, conf)``. Subclasses of nose.plugins.Plugin
-that want the standard options should call the superclass methods.
-
-nose uses optparse.OptionParser from the standard library to parse
-arguments. A plugin's ``options()`` method receives a parser
-instance. It's good form for a plugin to use that instance only to add
-additional options, and only long-style ones (--like-this). Most
-of nose's built-in options get their default value from an environment
-variable.
-
-A plugin's ``configure()`` method receives the parsed ``OptionParser`` options
-object, as well as the current config object. Plugins should configure their
-behavior based on the user-selected settings, and may raise exceptions
-if the configured behavior is nonsensical.
-
-Logging
-=======
-
-nose uses the logging classes from the standard library. To enable users
-to view debug messages easily, plugins should use ``logging.getLogger()`` to
-acquire a logger in the ``nose.plugins`` namespace.
-
-Recipes
-=======
-
-* Writing a plugin that monitors or controls test result output
-
- Implement any or all of ``addError``, ``addFailure``, etc., to monitor test
- results. If you also want to monitor output, implement
- ``setOutputStream`` and keep a reference to the output stream. If you
- want to prevent the builtin ``TextTestResult`` output, implement
- ``setOutputStream`` and *return a dummy stream*. The default output will go
- to the dummy stream, while you send your desired output to the real stream.
-
- Example: `examples/html_plugin/htmlplug.py`_
-
-* Writing a plugin that handles exceptions
-
- Subclass :doc:`ErrorClassPlugin <errorclasses>`.
-
- Examples: :doc:`nose.plugins.deprecated <deprecated>`,
- :doc:`nose.plugins.skip <skip>`
-
-* Writing a plugin that adds detail to error reports
-
- Implement ``formatError`` and/or ``formatFailure``. The error tuple
- you return (error class, error message, traceback) will replace the
- original error tuple.
-
- Examples: :doc:`nose.plugins.capture <capture>`,
- :doc:`nose.plugins.failuredetail <failuredetail>`
-
-* Writing a plugin that loads tests from files other than python modules
-
- Implement ``wantFile`` and ``loadTestsFromFile``. In ``wantFile``,
- return True for files that you want to examine for tests. In
- ``loadTestsFromFile``, for those files, return an iterable
- containing TestCases (or yield them as you find them;
- ``loadTestsFromFile`` may also be a generator).
-
- Example: :doc:`nose.plugins.doctests <doctests>`
-
-* Writing a plugin that prints a report
-
- Implement ``begin`` if you need to perform setup before testing
- begins. Implement ``report`` and output your report to the provided stream.
-
- Examples: :doc:`nose.plugins.cover <cover>`, :doc:`nose.plugins.prof <prof>`
-
-* Writing a plugin that selects or rejects tests
-
- Implement any or all ``want*`` methods. Return False to reject the test
- candidate, True to accept it -- which means that the test candidate
- will pass through the rest of the system, so you must be prepared to
- load tests from it if tests can't be loaded by the core loader or
- another plugin -- and None if you don't care.
-
- Examples: :doc:`nose.plugins.attrib <attrib>`,
- :doc:`nose.plugins.doctests <doctests>`, :doc:`nose.plugins.testid <testid>`
-
-
-More Examples
-=============
-
-See any builtin plugin or example plugin in the examples_ directory in
-the nose source distribution. There is a list of third-party plugins
-`on jottit`_.
-
-.. _examples/html_plugin/htmlplug.py: http://python-nose.googlecode.com/svn/trunk/examples/html_plugin/htmlplug.py
-.. _examples: http://python-nose.googlecode.com/svn/trunk/examples
-.. _on jottit: http://nose-plugins.jottit.com/
-
-"""
-from nose.plugins.base import Plugin
-from nose.plugins.manager import *
-from nose.plugins.plugintest import PluginTester
-
-if __name__ == '__main__':
- import doctest
- doctest.testmod()
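The recipes above describe selector plugins only in prose. A minimal sketch of
one, following the documented convention that ``want*`` hooks return False to
reject a candidate and None to defer to other plugins and the core selector
(the plugin name and filter rule here are hypothetical)::

    import os

    from nose.plugins import Plugin

    class SkipWip(Plugin):
        """Reject test functions whose names contain '_wip_'."""
        name = 'skip-wip'

        def options(self, parser, env=os.environ):
            super(SkipWip, self).options(parser, env=env)

        def configure(self, options, conf):
            super(SkipWip, self).configure(options, conf)

        def wantFunction(self, function):
            # False rejects the candidate; None leaves the decision
            # to other plugins and the default selector.
            if '_wip_' in function.__name__:
                return False
            return None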
diff --git a/lib/spack/external/nose/plugins/allmodules.py b/lib/spack/external/nose/plugins/allmodules.py
deleted file mode 100644
index 1ccd7773a7..0000000000
--- a/lib/spack/external/nose/plugins/allmodules.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""Use the AllModules plugin by passing ``--all-modules`` or setting the
-NOSE_ALL_MODULES environment variable to enable collection and execution of
-tests in all python modules. Normal nose behavior is to look for tests only in
-modules that match testMatch.
-
-More information: :doc:`../doc_tests/test_allmodules/test_allmodules`
-
-.. warning ::
-
- This plugin can have surprising interactions with plugins that load tests
- from what nose normally considers non-test modules, such as
- the :doc:`doctest plugin <doctests>`. This is because any given
- object in a module can't be loaded both by a plugin and the normal nose
- :class:`test loader <nose.loader.TestLoader>`. Also, if you have functions
- or classes in non-test modules that look like tests but aren't, you will
- likely see errors as nose attempts to run them as tests.
-
-"""
-
-import os
-from nose.plugins.base import Plugin
-
-class AllModules(Plugin):
- """Collect tests from all python modules.
- """
- def options(self, parser, env):
- """Register commandline options.
- """
- env_opt = 'NOSE_ALL_MODULES'
- parser.add_option('--all-modules',
- action="store_true",
- dest=self.enableOpt,
- default=env.get(env_opt),
- help="Enable plugin %s: %s [%s]" %
- (self.__class__.__name__, self.help(), env_opt))
-
- def wantFile(self, file):
- """Override to return True for all files ending with .py"""
- # always want .py files
- if file.endswith('.py'):
- return True
-
- def wantModule(self, module):
- """Override to return True for all modules"""
- return True
diff --git a/lib/spack/external/nose/plugins/attrib.py b/lib/spack/external/nose/plugins/attrib.py
deleted file mode 100644
index 3d4422a23a..0000000000
--- a/lib/spack/external/nose/plugins/attrib.py
+++ /dev/null
@@ -1,286 +0,0 @@
-"""Attribute selector plugin.
-
-Oftentimes when testing you will want to select tests based on
-criteria rather than simply by filename. For example, you might want
-to run all tests except for the slow ones. You can do this with the
-Attribute selector plugin by setting attributes on your test methods.
-Here is an example:
-
-.. code-block:: python
-
- def test_big_download():
- import urllib
- # commence slowness...
-
- test_big_download.slow = 1
-
-Once you've assigned an attribute ``slow = 1`` you can exclude that
-test and all other tests having the slow attribute by running ::
-
- $ nosetests -a '!slow'
-
-There is also a decorator available for you that will set attributes.
-Here's how to set ``slow=1`` like above with the decorator:
-
-.. code-block:: python
-
- from nose.plugins.attrib import attr
- @attr('slow')
- def test_big_download():
- import urllib
- # commence slowness...
-
-And here's how to set an attribute with a specific value:
-
-.. code-block:: python
-
- from nose.plugins.attrib import attr
- @attr(speed='slow')
- def test_big_download():
- import urllib
- # commence slowness...
-
-This test could be run with ::
-
- $ nosetests -a speed=slow
-
-In Python 2.6 and higher, ``@attr`` can be used on a class to set attributes
-on all its test methods at once. For example:
-
-.. code-block:: python
-
- from nose.plugins.attrib import attr
- @attr(speed='slow')
- class MyTestCase:
- def test_long_integration(self):
- pass
- def test_end_to_end_something(self):
- pass
-
-Below is a reference to the different syntaxes available.
-
-Simple syntax
--------------
-
-Examples of using the ``-a`` and ``--attr`` options:
-
-* ``nosetests -a status=stable``
- Only runs tests with attribute "status" having value "stable"
-
-* ``nosetests -a priority=2,status=stable``
- Runs tests having both attributes and values
-
-* ``nosetests -a priority=2 -a slow``
- Runs tests that match either attribute
-
-* ``nosetests -a tags=http``
- If a test's ``tags`` attribute was a list and it contained the value
- ``http`` then it would be run
-
-* ``nosetests -a slow``
- Runs tests with the attribute ``slow`` if its value does not equal False
- (False, [], "", etc...)
-
-* ``nosetests -a '!slow'``
- Runs tests that do NOT have the attribute ``slow`` or have a ``slow``
- attribute that is equal to False
- **NOTE**:
- if your shell (like bash) interprets '!' as a special character, make sure to
- put single quotes around it.
-
-Expression Evaluation
----------------------
-
-Examples using the ``-A`` and ``--eval-attr`` options:
-
-* ``nosetests -A "not slow"``
- Evaluates the Python expression "not slow" and runs the test if True
-
-* ``nosetests -A "(priority > 5) and not slow"``
- Evaluates a complex Python expression and runs the test if True
-
-"""
-import inspect
-import logging
-import os
-import sys
-from inspect import isfunction
-from nose.plugins.base import Plugin
-from nose.util import tolist
-
-log = logging.getLogger('nose.plugins.attrib')
-compat_24 = sys.version_info >= (2, 4)
-
-def attr(*args, **kwargs):
- """Decorator that adds attributes to classes or functions
- for use with the Attribute (-a) plugin.
- """
- def wrap_ob(ob):
- for name in args:
- setattr(ob, name, True)
- for name, value in kwargs.iteritems():
- setattr(ob, name, value)
- return ob
- return wrap_ob
-
-def get_method_attr(method, cls, attr_name, default = False):
- """Look up an attribute on a method/function.
- If the attribute isn't found there, look it up on the
- method's class, if any.
- """
- Missing = object()
- value = getattr(method, attr_name, Missing)
- if value is Missing and cls is not None:
- value = getattr(cls, attr_name, Missing)
- if value is Missing:
- return default
- return value
-
-
-class ContextHelper:
- """Object that can act as context dictionary for eval and looks up
- names as attributes on a method/function and its class.
- """
- def __init__(self, method, cls):
- self.method = method
- self.cls = cls
-
- def __getitem__(self, name):
- return get_method_attr(self.method, self.cls, name)
-
-
-class AttributeSelector(Plugin):
- """Selects test cases to be run based on their attributes.
- """
-
- def __init__(self):
- Plugin.__init__(self)
- self.attribs = []
-
- def options(self, parser, env):
- """Register command line options"""
- parser.add_option("-a", "--attr",
- dest="attr", action="append",
- default=env.get('NOSE_ATTR'),
- metavar="ATTR",
- help="Run only tests that have attributes "
- "specified by ATTR [NOSE_ATTR]")
- # disable in < 2.4: eval can't take needed args
- if compat_24:
- parser.add_option("-A", "--eval-attr",
- dest="eval_attr", metavar="EXPR", action="append",
- default=env.get('NOSE_EVAL_ATTR'),
- help="Run only tests for whose attributes "
- "the Python expression EXPR evaluates "
- "to True [NOSE_EVAL_ATTR]")
-
- def configure(self, options, config):
- """Configure the plugin and system, based on selected options.
-
- attr and eval_attr may each be lists.
-
- self.attribs will be a list of lists of tuples. In that list, each
- list is a group of attributes, all of which must match for the rule to
- match.
- """
- self.attribs = []
-
- # handle python eval-expression parameter
- if compat_24 and options.eval_attr:
- eval_attr = tolist(options.eval_attr)
- for attr in eval_attr:
- # "<python expression>"
- # -> eval(expr) in attribute context must be True
- def eval_in_context(expr, obj, cls):
- return eval(expr, None, ContextHelper(obj, cls))
- self.attribs.append([(attr, eval_in_context)])
-
- # attribute requirements are a comma separated list of
- # 'key=value' pairs
- if options.attr:
- std_attr = tolist(options.attr)
- for attr in std_attr:
- # all attributes within an attribute group must match
- attr_group = []
- for attrib in attr.strip().split(","):
- # don't die on trailing comma
- if not attrib:
- continue
- items = attrib.split("=", 1)
- if len(items) > 1:
- # "name=value"
- # -> 'str(obj.name) == value' must be True
- key, value = items
- else:
- key = items[0]
- if key[0] == "!":
- # "!name"
- # 'bool(obj.name)' must be False
- key = key[1:]
- value = False
- else:
- # "name"
- # -> 'bool(obj.name)' must be True
- value = True
- attr_group.append((key, value))
- self.attribs.append(attr_group)
- if self.attribs:
- self.enabled = True
-
- def validateAttrib(self, method, cls = None):
- """Verify whether a method has the required attributes.
- The method is considered a match if it matches all attributes
- for any attribute group.
- """
- # TODO: is there a need for case-sensitive value comparison?
- any = False
- for group in self.attribs:
- match = True
- for key, value in group:
- attr = get_method_attr(method, cls, key)
- if callable(value):
- if not value(key, method, cls):
- match = False
- break
- elif value is True:
- # value must exist and be True
- if not bool(attr):
- match = False
- break
- elif value is False:
- # value must not exist or be False
- if bool(attr):
- match = False
- break
- elif type(attr) in (list, tuple):
- # value must be found in the list attribute
- if not str(value).lower() in [str(x).lower()
- for x in attr]:
- match = False
- break
- else:
- # value must match, convert to string and compare
- if (value != attr
- and str(value).lower() != str(attr).lower()):
- match = False
- break
- any = any or match
- if any:
- # not True because we don't want to FORCE the selection of the
- # item, only say that it is acceptable
- return None
- return False
-
- def wantFunction(self, function):
- """Accept the function if its attributes match.
- """
- return self.validateAttrib(function)
-
- def wantMethod(self, method):
- """Accept the method if its attributes match.
- """
- try:
- cls = method.im_class
- except AttributeError:
- return False
- return self.validateAttrib(method, cls)
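The ``configure``/``validateAttrib`` pair above implements AND-within-group,
OR-across-groups matching. A small sketch of how the removed selector parses
and applies ``-a`` values (``FakeOptions`` is a hypothetical stand-in for the
parsed optparse result)::

    from nose.plugins.attrib import AttributeSelector

    class FakeOptions(object):
        attr = ['priority=2,status=stable', 'slow']
        eval_attr = None

    sel = AttributeSelector()
    sel.configure(FakeOptions(), None)
    # AND within a group, OR across groups:
    # [[('priority', '2'), ('status', 'stable')], [('slow', True)]]
    print(sel.attribs)

    def test_download():
        pass
    test_download.slow = True

    # matches the second group, so the selector does not reject it;
    # None means "acceptable, let other plugins weigh in"
    print(sel.validateAttrib(test_download))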
diff --git a/lib/spack/external/nose/plugins/base.py b/lib/spack/external/nose/plugins/base.py
deleted file mode 100644
index f09beb696f..0000000000
--- a/lib/spack/external/nose/plugins/base.py
+++ /dev/null
@@ -1,725 +0,0 @@
-import os
-import textwrap
-from optparse import OptionConflictError
-from warnings import warn
-from nose.util import tolist
-
-class Plugin(object):
- """Base class for nose plugins. It's recommended but not *necessary* to
- subclass this class to create a plugin, but all plugins *must* implement
- `options(self, parser, env)` and `configure(self, options, conf)`, and
- must have the attributes `enabled`, `name` and `score`. The `name`
- attribute may contain hyphens ('-').
-
- Plugins should not be enabled by default.
-
- Subclassing Plugin (and calling the superclass methods in
- __init__, configure, and options, if you override them) will give
- your plugin some friendly default behavior:
-
- * A --with-$name option will be added to the command line interface
- to enable the plugin, and a corresponding environment variable
- will be used as the default value. The plugin class's docstring
- will be used as the help for this option.
- * The plugin will not be enabled unless this option is selected by
- the user.
- """
- can_configure = False
- enabled = False
- enableOpt = None
- name = None
- score = 100
-
- def __init__(self):
- if self.name is None:
- self.name = self.__class__.__name__.lower()
- if self.enableOpt is None:
- self.enableOpt = "enable_plugin_%s" % self.name.replace('-', '_')
-
- def addOptions(self, parser, env=None):
- """Add command-line options for this plugin.
-
- The base plugin class adds --with-$name by default, used to enable the
- plugin.
-
- .. warning :: Don't implement addOptions unless you want to override
- all default option handling behavior, including
- warnings for conflicting options. Implement
- :meth:`options
- <nose.plugins.base.IPluginInterface.options>`
- instead.
- """
- self.add_options(parser, env)
-
- def add_options(self, parser, env=None):
- """Non-camel-case version of func name for backwards compatibility.
-
- .. warning ::
-
- DEPRECATED: Do not use this method,
- use :meth:`options <nose.plugins.base.IPluginInterface.options>`
- instead.
-
- """
- # FIXME raise deprecation warning if wasn't called by wrapper
- if env is None:
- env = os.environ
- try:
- self.options(parser, env)
- self.can_configure = True
- except OptionConflictError, e:
- warn("Plugin %s has conflicting option string: %s and will "
- "be disabled" % (self, e), RuntimeWarning)
- self.enabled = False
- self.can_configure = False
-
- def options(self, parser, env):
- """Register commandline options.
-
- Implement this method for normal options behavior with protection from
- OptionConflictErrors. If you override this method and want the default
- --with-$name option to be registered, be sure to call super().
- """
- env_opt = 'NOSE_WITH_%s' % self.name.upper()
- env_opt = env_opt.replace('-', '_')
- parser.add_option("--with-%s" % self.name,
- action="store_true",
- dest=self.enableOpt,
- default=env.get(env_opt),
- help="Enable plugin %s: %s [%s]" %
- (self.__class__.__name__, self.help(), env_opt))
-
- def configure(self, options, conf):
- """Configure the plugin and system, based on selected options.
-
- The base plugin class sets the plugin to enabled if the enable option
- for the plugin (self.enableOpt) is true.
- """
- if not self.can_configure:
- return
- self.conf = conf
- if hasattr(options, self.enableOpt):
- self.enabled = getattr(options, self.enableOpt)
-
- def help(self):
- """Return help for this plugin. This will be output as the help
- section of the --with-$name option that enables the plugin.
- """
- if self.__class__.__doc__:
- # doc sections are often indented; compress the spaces
- return textwrap.dedent(self.__class__.__doc__)
- return "(no help available)"
-
- # Compatibility shim
- def tolist(self, val):
- warn("Plugin.tolist is deprecated. Use nose.util.tolist instead",
- DeprecationWarning)
- return tolist(val)
-
-
-class IPluginInterface(object):
- """
- IPluginInterface describes the plugin API. Do not subclass or use this
- class directly.
- """
- def __new__(cls, *arg, **kw):
- raise TypeError("IPluginInterface class is for documentation only")
-
- def addOptions(self, parser, env):
- """Called to allow plugin to register command-line options with the
- parser. DO NOT return a value from this method unless you want to stop
- all other plugins from setting their options.
-
- .. warning ::
-
- DEPRECATED -- implement
- :meth:`options <nose.plugins.base.IPluginInterface.options>` instead.
- """
- pass
- add_options = addOptions
- add_options.deprecated = True
-
- def addDeprecated(self, test):
- """Called when a deprecated test is seen. DO NOT return a value
- unless you want to stop other plugins from seeing the deprecated
- test.
-
- .. warning :: DEPRECATED -- check error class in addError instead
- """
- pass
- addDeprecated.deprecated = True
-
- def addError(self, test, err):
- """Called when a test raises an uncaught exception. DO NOT return a
- value unless you want to stop other plugins from seeing that the
- test has raised an error.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- :param err: sys.exc_info() tuple
- :type err: 3-tuple
- """
- pass
- addError.changed = True
-
- def addFailure(self, test, err):
- """Called when a test fails. DO NOT return a value unless you
- want to stop other plugins from seeing that the test has failed.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- :param err: 3-tuple
- :type err: sys.exc_info() tuple
- """
- pass
- addFailure.changed = True
-
- def addSkip(self, test):
- """Called when a test is skipped. DO NOT return a value unless
- you want to stop other plugins from seeing the skipped test.
-
- .. warning:: DEPRECATED -- check error class in addError instead
- """
- pass
- addSkip.deprecated = True
-
- def addSuccess(self, test):
- """Called when a test passes. DO NOT return a value unless you
- want to stop other plugins from seeing the passing test.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
- addSuccess.changed = True
-
- def afterContext(self):
- """Called after a context (generally a module) has been
- lazy-loaded, imported, set up, had its tests loaded and
- executed, and torn down.
- """
- pass
- afterContext._new = True
-
- def afterDirectory(self, path):
- """Called after all tests have been loaded from directory at path
- and run.
-
- :param path: the directory that has finished processing
- :type path: string
- """
- pass
- afterDirectory._new = True
-
- def afterImport(self, filename, module):
- """Called after module is imported from filename. afterImport
- is called even if the import failed.
-
- :param filename: The file that was loaded
- :type filename: string
- :param module: The name of the module
- :type module: string
- """
- pass
- afterImport._new = True
-
- def afterTest(self, test):
- """Called after the test has been run and the result recorded
- (after stopTest).
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
- afterTest._new = True
-
- def beforeContext(self):
- """Called before a context (generally a module) is
- examined. Because the context is not yet loaded, plugins don't
- get to know what the context is; so any context operations
- should use a stack that is pushed in `beforeContext` and popped
- in `afterContext` to ensure they operate symmetrically.
-
- `beforeContext` and `afterContext` are mainly useful for tracking
- and restoring global state around possible changes from within a
- context, whatever the context may be. If you need to operate on
- contexts themselves, see `startContext` and `stopContext`, which
- are passed the context in question, but are called after
- it has been loaded (imported in the module case).
- """
- pass
- beforeContext._new = True
-
- def beforeDirectory(self, path):
- """Called before tests are loaded from directory at path.
-
- :param path: the directory that is about to be processed
- """
- pass
- beforeDirectory._new = True
-
- def beforeImport(self, filename, module):
- """Called before module is imported from filename.
-
- :param filename: The file that will be loaded
- :param module: The name of the module found in file
- :type module: string
- """
- beforeImport._new = True
-
- def beforeTest(self, test):
- """Called before the test is run (before startTest).
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
- beforeTest._new = True
-
- def begin(self):
- """Called before any tests are collected or run. Use this to
- perform any setup needed before testing begins.
- """
- pass
-
- def configure(self, options, conf):
- """Called after the command line has been parsed, with the
- parsed options and the config container. Here, implement any
- config storage or changes to state or operation that are set
- by command line options.
-
- DO NOT return a value from this method unless you want to
- stop all other plugins from being configured.
- """
- pass
-
- def finalize(self, result):
- """Called after all report output, including output from all
- plugins, has been sent to the stream. Use this to print final
- test results or perform final cleanup. Return None to allow
- other plugins to continue printing, or any other value to stop
- them.
-
- :param result: test result object
-
- .. Note:: When tests are run under a test runner other than
- :class:`nose.core.TextTestRunner`, such as
- via ``python setup.py test``, this method may be called
- **before** the default report output is sent.
- """
- pass
-
- def describeTest(self, test):
- """Return a test description.
-
- Called by :meth:`nose.case.Test.shortDescription`.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
- describeTest._new = True
-
- def formatError(self, test, err):
- """Called in result.addError, before plugin.addError. If you
- want to replace or modify the error tuple, return a new error
- tuple, otherwise return err, the original error tuple.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- :param err: sys.exc_info() tuple
- :type err: 3-tuple
- """
- pass
- formatError._new = True
- formatError.chainable = True
- # test arg is not chainable
- formatError.static_args = (True, False)
-
- def formatFailure(self, test, err):
- """Called in result.addFailure, before plugin.addFailure. If you
- want to replace or modify the error tuple, return a new error
- tuple, otherwise return err, the original error tuple.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- :param err: sys.exc_info() tuple
- :type err: 3-tuple
- """
- pass
- formatFailure._new = True
- formatFailure.chainable = True
- # test arg is not chainable
- formatFailure.static_args = (True, False)
-
- def handleError(self, test, err):
- """Called on addError. To handle the error yourself and prevent normal
- error processing, return a true value.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- :param err: sys.exc_info() tuple
- :type err: 3-tuple
- """
- pass
- handleError._new = True
-
- def handleFailure(self, test, err):
- """Called on addFailure. To handle the failure yourself and
- prevent normal failure processing, return a true value.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- :param err: sys.exc_info() tuple
- :type err: 3-tuple
- """
- pass
- handleFailure._new = True
-
- def loadTestsFromDir(self, path):
- """Return iterable of tests from a directory. May be a
- generator. Each item returned must be a runnable
- unittest.TestCase (or subclass) instance or suite instance.
- Return None if your plugin cannot collect any tests from
- directory.
-
- :param path: The path to the directory.
- """
- pass
- loadTestsFromDir.generative = True
- loadTestsFromDir._new = True
-
- def loadTestsFromModule(self, module, path=None):
- """Return iterable of tests in a module. May be a
- generator. Each item returned must be a runnable
- unittest.TestCase (or subclass) instance.
- Return None if your plugin cannot
- collect any tests from module.
-
- :param module: The module object
- :type module: python module
- :param path: the path of the module to search, to distinguish from
- namespace package modules
-
- .. note::
-
- NEW. The ``path`` parameter will only be passed by nose 0.11
- or above.
- """
- pass
- loadTestsFromModule.generative = True
-
- def loadTestsFromName(self, name, module=None, importPath=None):
- """Return tests in this file or module. Return None if you are not able
- to load any tests, or an iterable if you are. May be a
- generator.
-
- :param name: The test name. May be a file or module name plus a test
- callable. Use split_test_name to split into parts. Or it might
- be some crazy name of your own devising, in which case, do
- whatever you want.
- :param module: Module from which the name is to be loaded
- :param importPath: Path from which file (must be a python module) was
- found
-
- .. warning:: DEPRECATED: this argument will NOT be passed.
- """
- pass
- loadTestsFromName.generative = True
-
- def loadTestsFromNames(self, names, module=None):
- """Return a tuple of (tests loaded, remaining names). Return
- None if you are not able to load any tests. Multiple plugins
- may implement loadTestsFromNames; the remaining name list from
- each will be passed to the next as input.
-
- :param names: List of test names.
- :type names: iterable
- :param module: Module from which the names are to be loaded
- """
- pass
- loadTestsFromNames._new = True
- loadTestsFromNames.chainable = True
-
- def loadTestsFromFile(self, filename):
- """Return tests in this file. Return None if you are not
- interested in loading any tests, or an iterable if you are and
- can load some. May be a generator. *If you are interested in
- loading tests from the file and encounter no errors, but find
- no tests, yield False or return [False].*
-
- .. Note:: This method replaces loadTestsFromPath from the 0.9
- API.
-
- :param filename: The full path to the file or directory.
- """
- pass
- loadTestsFromFile.generative = True
- loadTestsFromFile._new = True
-
- def loadTestsFromPath(self, path):
- """
- .. warning:: DEPRECATED -- use loadTestsFromFile instead
- """
- pass
- loadTestsFromPath.deprecated = True
-
- def loadTestsFromTestCase(self, cls):
- """Return tests in this test case class. Return None if you are
- not able to load any tests, or an iterable if you are. May be a
- generator.
-
- :param cls: The test case class. Must be subclass of
- :class:`unittest.TestCase`.
- """
- pass
- loadTestsFromTestCase.generative = True
-
- def loadTestsFromTestClass(self, cls):
- """Return tests in this test class. Class will *not* be a
- unittest.TestCase subclass. Return None if you are not able to
- load any tests, or an iterable if you are. May be a generator.
-
- :param cls: The test case class. Must **not** be a subclass of
- :class:`unittest.TestCase`.
- """
- pass
- loadTestsFromTestClass._new = True
- loadTestsFromTestClass.generative = True
-
- def makeTest(self, obj, parent):
- """Given an object and its parent, return or yield one or more
- test cases. Each test must be a unittest.TestCase (or subclass)
- instance. This is called before default test loading to allow
- plugins to load an alternate test case or cases for an
- object. May be a generator.
-
- :param obj: The object to be made into a test
- :param parent: The parent of obj (eg, for a method, the class)
- """
- pass
- makeTest._new = True
- makeTest.generative = True
-
- def options(self, parser, env):
- """Called to allow plugin to register command line
- options with the parser.
-
- DO NOT return a value from this method unless you want to stop
- all other plugins from setting their options.
-
- :param parser: options parser instance
- :type parser: :class:`ConfigParser.ConfigParser`
- :param env: environment, default is os.environ
- """
- pass
- options._new = True
-
- def prepareTest(self, test):
- """Called before the test is run by the test runner. Please
- note the article *the* in the previous sentence: prepareTest
- is called *only once*, and is passed the test case or test
- suite that the test runner will execute. It is *not* called
- for each individual test case. If you return a non-None value,
- that return value will be run as the test. Use this hook to
- wrap or decorate the test with another function. If you need
- to modify or wrap individual test cases, use `prepareTestCase`
- instead.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
-
- def prepareTestCase(self, test):
- """Prepare or wrap an individual test case. Called before
- execution of the test. The test passed here is a
- nose.case.Test instance; the case to be executed is in the
- test attribute of the passed case. To modify the test to be
- run, you should return a callable that takes one argument (the
- test result object) -- it is recommended that you *do not*
- side-effect the nose.case.Test instance you have been passed.
-
- Keep in mind that when you replace the test callable you are
- replacing the run() method of the test case -- including the
- exception handling and result calls, etc.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
- prepareTestCase._new = True
-
- def prepareTestLoader(self, loader):
- """Called before tests are loaded. To replace the test loader,
- return a test loader. To allow other plugins to process the
- test loader, return None. Only one plugin may replace the test
- loader. Only valid when using nose.TestProgram.
-
- :param loader: :class:`nose.loader.TestLoader`
- (or other loader) instance
- """
- pass
- prepareTestLoader._new = True
-
- def prepareTestResult(self, result):
- """Called before the first test is run. To use a different
- test result handler for all tests than the given result,
- return a test result handler. NOTE however that this handler
- will only be seen by tests, that is, inside of the result
- proxy system. The TestRunner and TestProgram -- whether nose's
- or other -- will continue to see the original result
- handler. For this reason, it is usually better to monkeypatch
- the result (for instance, if you want to handle some
- exceptions in a unique way). Only one plugin may replace the
- result, but many may monkeypatch it. If you want to
- monkeypatch and stop other plugins from doing so, monkeypatch
- and return the patched result.
-
- :param result: :class:`nose.result.TextTestResult`
- (or other result) instance
- """
- pass
- prepareTestResult._new = True
-
- def prepareTestRunner(self, runner):
- """Called before tests are run. To replace the test runner,
- return a test runner. To allow other plugins to process the
- test runner, return None. Only valid when using nose.TestProgram.
-
- :param runner: :class:`nose.core.TextTestRunner`
- (or other runner) instance
- """
- pass
- prepareTestRunner._new = True
-
- def report(self, stream):
- """Called after all error output has been printed. Print your
- plugin's report to the provided stream. Return None to allow
- other plugins to print reports, any other value to stop them.
-
- :param stream: stream object; send your output here
- :type stream: file-like object
- """
- pass
-
- def setOutputStream(self, stream):
- """Called before test output begins. To direct test output to a
- new stream, return a stream object, which must implement a
- `write(msg)` method. If you only want to note the stream, not
- capture or redirect it, then return None.
-
- :param stream: stream object; send your output here
- :type stream: file-like object
- """
-
- def startContext(self, context):
- """Called before context setup and the running of tests in the
- context. Note that tests have already been *loaded* from the
- context before this call.
-
- :param context: the context about to be set up. May be a module or
- class, or any other object that contains tests.
- """
- pass
- startContext._new = True
-
- def startTest(self, test):
- """Called before each test is run. DO NOT return a value unless
- you want to stop other plugins from seeing the test start.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
-
- def stopContext(self, context):
- """Called after the tests in a context have run and the
- context has been torn down.
-
- :param context: the context that has been torn down. May be a module or
- class, or any other object that contains tests.
- """
- pass
- stopContext._new = True
-
- def stopTest(self, test):
- """Called after each test is run. DO NOT return a value unless
- you want to stop other plugins from seeing that the test has stopped.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
-
- def testName(self, test):
- """Return a short test name. Called by `nose.case.Test.__str__`.
-
- :param test: the test case
- :type test: :class:`nose.case.Test`
- """
- pass
- testName._new = True
-
- def wantClass(self, cls):
- """Return true if you want the main test selector to collect
- tests from this class, false if you don't, and None if you don't
- care.
-
- :param cls: The class being examined by the selector
- """
- pass
-
- def wantDirectory(self, dirname):
- """Return true if you want test collection to descend into this
- directory, false if you do not, and None if you don't care.
-
- :param dirname: Full path to directory being examined by the selector
- """
- pass
-
- def wantFile(self, file):
- """Return true if you want to collect tests from this file,
- false if you do not and None if you don't care.
-
- Change from 0.9: The optional package parameter is no longer passed.
-
- :param file: Full path to file being examined by the selector
- """
- pass
-
- def wantFunction(self, function):
- """Return true to collect this function as a test, false to
- prevent it from being collected, and None if you don't care.
-
- :param function: The function object being examined by the selector
- """
- pass
-
- def wantMethod(self, method):
- """Return true to collect this method as a test, false to
- prevent it from being collected, and None if you don't care.
-
- :param method: The method object being examined by the selector
- :type method: unbound method
- """
- pass
-
- def wantModule(self, module):
- """Return true if you want the collector to descend into this
- module, false to prevent the collector from descending into the
- module, and None if you don't care.
-
- :param module: The module object being examined by the selector
- :type module: python module
- """
- pass
-
- def wantModuleTests(self, module):
- """
- .. warning:: DEPRECATED -- this method will not be called, it has
- been folded into wantModule.
- """
- pass
- wantModuleTests.deprecated = True
-
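The ``Plugin`` base class above derives a plugin's command-line switch and
environment variable from its name. A short sketch of those defaults for a
hypothetical subclass (the names below are illustrative only)::

    from nose.plugins.base import Plugin

    class MyTimer(Plugin):
        """Example plugin; this docstring becomes the --with-mytimer help."""

    p = MyTimer()
    print(p.name)       # 'mytimer' -- lowercased class name
    print(p.enableOpt)  # 'enable_plugin_mytimer'
    # Plugin.options() would register --with-mytimer, with its default
    # taken from the NOSE_WITH_MYTIMER environment variable.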
diff --git a/lib/spack/external/nose/plugins/builtin.py b/lib/spack/external/nose/plugins/builtin.py
deleted file mode 100644
index 4fcc0018ad..0000000000
--- a/lib/spack/external/nose/plugins/builtin.py
+++ /dev/null
@@ -1,34 +0,0 @@
-"""
-Lists builtin plugins.
-"""
-plugins = []
-builtins = (
- ('nose.plugins.attrib', 'AttributeSelector'),
- ('nose.plugins.capture', 'Capture'),
- ('nose.plugins.logcapture', 'LogCapture'),
- ('nose.plugins.cover', 'Coverage'),
- ('nose.plugins.debug', 'Pdb'),
- ('nose.plugins.deprecated', 'Deprecated'),
- ('nose.plugins.doctests', 'Doctest'),
- ('nose.plugins.isolate', 'IsolationPlugin'),
- ('nose.plugins.failuredetail', 'FailureDetail'),
- ('nose.plugins.prof', 'Profile'),
- ('nose.plugins.skip', 'Skip'),
- ('nose.plugins.testid', 'TestId'),
- ('nose.plugins.multiprocess', 'MultiProcess'),
- ('nose.plugins.xunit', 'Xunit'),
- ('nose.plugins.allmodules', 'AllModules'),
- ('nose.plugins.collect', 'CollectOnly'),
- )
-
-for module, cls in builtins:
- try:
- plugmod = __import__(module, globals(), locals(), [cls])
- except KeyboardInterrupt:
- raise
- except:
- continue
- plug = getattr(plugmod, cls)
- plugins.append(plug)
- globals()[cls] = plug
-
diff --git a/lib/spack/external/nose/plugins/capture.py b/lib/spack/external/nose/plugins/capture.py
deleted file mode 100644
index fa4e5dcaaf..0000000000
--- a/lib/spack/external/nose/plugins/capture.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""
-This plugin captures stdout during test execution. If the test fails
-or raises an error, the captured output will be appended to the error
-or failure output. It is enabled by default but can be disabled with
-the options ``-s`` or ``--nocapture``.
-
-:Options:
- ``--nocapture``
- Don't capture stdout (any stdout output will be printed immediately)
-
-"""
-import logging
-import os
-import sys
-from nose.plugins.base import Plugin
-from nose.pyversion import exc_to_unicode, force_unicode
-from nose.util import ln
-from StringIO import StringIO
-
-
-log = logging.getLogger(__name__)
-
-class Capture(Plugin):
- """
- Output capture plugin. Enabled by default. Disable with ``-s`` or
- ``--nocapture``. This plugin captures stdout during test execution,
- appending any output captured to the error or failure output,
- should the test fail or raise an error.
- """
- enabled = True
- env_opt = 'NOSE_NOCAPTURE'
- name = 'capture'
- score = 1600
-
- def __init__(self):
- self.stdout = []
- self._buf = None
-
- def options(self, parser, env):
- """Register commandline options
- """
- parser.add_option(
- "-s", "--nocapture", action="store_false",
- default=not env.get(self.env_opt), dest="capture",
- help="Don't capture stdout (any stdout output "
- "will be printed immediately) [NOSE_NOCAPTURE]")
-
- def configure(self, options, conf):
- """Configure plugin. Plugin is enabled by default.
- """
- self.conf = conf
- if not options.capture:
- self.enabled = False
-
- def afterTest(self, test):
- """Clear capture buffer.
- """
- self.end()
- self._buf = None
-
- def begin(self):
- """Replace sys.stdout with capture buffer.
- """
- self.start() # get an early handle on sys.stdout
-
- def beforeTest(self, test):
- """Flush capture buffer.
- """
- self.start()
-
- def formatError(self, test, err):
- """Add captured output to error report.
- """
- test.capturedOutput = output = self.buffer
- self._buf = None
- if not output:
- # Don't return None, as that would prevent other formatters
- # from running and would discard earlier formatters' output;
- # instead, return the err we got.
- return err
- ec, ev, tb = err
- return (ec, self.addCaptureToErr(ev, output), tb)
-
- def formatFailure(self, test, err):
- """Add captured output to failure report.
- """
- return self.formatError(test, err)
-
- def addCaptureToErr(self, ev, output):
- ev = exc_to_unicode(ev)
- output = force_unicode(output)
- return u'\n'.join([ev, ln(u'>> begin captured stdout <<'),
- output, ln(u'>> end captured stdout <<')])
-
- def start(self):
- self.stdout.append(sys.stdout)
- self._buf = StringIO()
- sys.stdout = self._buf
-
- def end(self):
- if self.stdout:
- sys.stdout = self.stdout.pop()
-
- def finalize(self, result):
- """Restore stdout.
- """
- while self.stdout:
- self.end()
-
- def _get_buffer(self):
- if self._buf is not None:
- return self._buf.getvalue()
-
- buffer = property(_get_buffer, None, None,
- """Captured stdout output.""")
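The removed ``Capture`` plugin works by swapping ``sys.stdout`` for an
in-memory buffer around each test (``start``) and popping the saved stream
back afterwards (``end``). The core pattern, reduced to a standalone sketch
rather than the plugin itself (capture.py used the Python 2 ``StringIO``
module; ``io.StringIO`` is the Python 3 equivalent)::

    import sys
    from io import StringIO

    _saved = []

    def start_capture():
        _saved.append(sys.stdout)   # remember the current stream
        sys.stdout = StringIO()     # redirect writes into a buffer

    def end_capture():
        captured = sys.stdout.getvalue()
        sys.stdout = _saved.pop()   # restore the previous stream
        return captured

    start_capture()
    print('hello from a test')
    output = end_capture()
    sys.stdout.write(repr(output) + '\n')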
diff --git a/lib/spack/external/nose/plugins/collect.py b/lib/spack/external/nose/plugins/collect.py
deleted file mode 100644
index 6f9f0faa77..0000000000
--- a/lib/spack/external/nose/plugins/collect.py
+++ /dev/null
@@ -1,94 +0,0 @@
-"""
-This plugin bypasses the actual execution of tests, and instead just collects
-test names. Fixtures are also bypassed, so running nosetests with the
-collection plugin enabled should be very quick.
-
-This plugin is useful in combination with the testid plugin (``--with-id``).
-Run both together to get an indexed list of all tests, which will enable you to
-run individual tests by index number.
-
-This plugin is also useful for counting tests in a test suite, and making
-people watching your demo think all of your tests pass.
-"""
-from nose.plugins.base import Plugin
-from nose.case import Test
-import logging
-import unittest
-
-log = logging.getLogger(__name__)
-
-
-class CollectOnly(Plugin):
- """
- Collect and output test names only, don't run any tests.
- """
- name = "collect-only"
- enableOpt = 'collect_only'
-
- def options(self, parser, env):
- """Register commandline options.
- """
- parser.add_option('--collect-only',
- action='store_true',
- dest=self.enableOpt,
- default=env.get('NOSE_COLLECT_ONLY'),
- help="Enable collect-only: %s [NOSE_COLLECT_ONLY]" %
- (self.help()))
-
- def prepareTestLoader(self, loader):
- """Install collect-only suite class in TestLoader.
- """
- # Disable context awareness
- log.debug("Preparing test loader")
- loader.suiteClass = TestSuiteFactory(self.conf)
-
- def prepareTestCase(self, test):
- """Replace actual test with dummy that always passes.
- """
- # Return something that always passes
- log.debug("Preparing test case %s", test)
- if not isinstance(test, Test):
- return
- def run(result):
- # We need to make these plugin calls because there won't be
- # a result proxy, due to using a stripped-down test suite
- self.conf.plugins.startTest(test)
- result.startTest(test)
- self.conf.plugins.addSuccess(test)
- result.addSuccess(test)
- self.conf.plugins.stopTest(test)
- result.stopTest(test)
- return run
-
-
-class TestSuiteFactory:
- """
- Factory for producing configured test suites.
- """
- def __init__(self, conf):
- self.conf = conf
-
- def __call__(self, tests=(), **kw):
- return TestSuite(tests, conf=self.conf)
-
-
-class TestSuite(unittest.TestSuite):
- """
- Basic test suite that bypasses most proxy and plugin calls, but does
- wrap tests in a nose.case.Test so prepareTestCase will be called.
- """
- def __init__(self, tests=(), conf=None):
- self.conf = conf
- # Exec lazy suites: makes discovery depth-first
- if callable(tests):
- tests = tests()
- log.debug("TestSuite(%r)", tests)
- unittest.TestSuite.__init__(self, tests)
-
- def addTest(self, test):
- log.debug("Add test %s", test)
- if isinstance(test, unittest.TestSuite):
- self._tests.append(test)
- else:
- self._tests.append(Test(test, config=self.conf))
-
diff --git a/lib/spack/external/nose/plugins/cover.py b/lib/spack/external/nose/plugins/cover.py
deleted file mode 100644
index fbe2e30dcd..0000000000
--- a/lib/spack/external/nose/plugins/cover.py
+++ /dev/null
@@ -1,271 +0,0 @@
-"""If you have Ned Batchelder's coverage_ module installed, you may activate a
-coverage report with the ``--with-coverage`` switch or NOSE_WITH_COVERAGE
-environment variable. The coverage report will cover any python source module
-imported after the start of the test run, excluding modules that match
-testMatch. If you want to include those modules too, use the ``--cover-tests``
-switch, or set the NOSE_COVER_TESTS environment variable to a true value. To
-restrict the coverage report to modules from a particular package or packages,
-use the ``--cover-package`` switch or the NOSE_COVER_PACKAGE environment
-variable.
-
-.. _coverage: http://www.nedbatchelder.com/code/modules/coverage.html
-"""
-import logging
-import re
-import sys
-import StringIO
-from nose.plugins.base import Plugin
-from nose.util import src, tolist
-
-log = logging.getLogger(__name__)
-
-
-class Coverage(Plugin):
- """
- Activate a coverage report using Ned Batchelder's coverage module.
- """
- coverTests = False
- coverPackages = None
- coverInstance = None
- coverErase = False
- coverMinPercentage = None
- score = 200
- status = {}
-
- def options(self, parser, env):
- """
- Add options to command line.
- """
- super(Coverage, self).options(parser, env)
- parser.add_option("--cover-package", action="append",
- default=env.get('NOSE_COVER_PACKAGE'),
- metavar="PACKAGE",
- dest="cover_packages",
- help="Restrict coverage output to selected packages "
- "[NOSE_COVER_PACKAGE]")
- parser.add_option("--cover-erase", action="store_true",
- default=env.get('NOSE_COVER_ERASE'),
- dest="cover_erase",
- help="Erase previously collected coverage "
- "statistics before run")
- parser.add_option("--cover-tests", action="store_true",
- dest="cover_tests",
- default=env.get('NOSE_COVER_TESTS'),
- help="Include test modules in coverage report "
- "[NOSE_COVER_TESTS]")
- parser.add_option("--cover-min-percentage", action="store",
- dest="cover_min_percentage",
- default=env.get('NOSE_COVER_MIN_PERCENTAGE'),
- help="Minimum percentage of coverage for tests "
- "to pass [NOSE_COVER_MIN_PERCENTAGE]")
- parser.add_option("--cover-inclusive", action="store_true",
- dest="cover_inclusive",
- default=env.get('NOSE_COVER_INCLUSIVE'),
- help="Include all python files under working "
- "directory in coverage report. Useful for "
- "discovering holes in test coverage if not all "
- "files are imported by the test suite. "
- "[NOSE_COVER_INCLUSIVE]")
- parser.add_option("--cover-html", action="store_true",
- default=env.get('NOSE_COVER_HTML'),
- dest='cover_html',
- help="Produce HTML coverage information")
- parser.add_option('--cover-html-dir', action='store',
- default=env.get('NOSE_COVER_HTML_DIR', 'cover'),
- dest='cover_html_dir',
- metavar='DIR',
- help='Produce HTML coverage information in dir')
- parser.add_option("--cover-branches", action="store_true",
- default=env.get('NOSE_COVER_BRANCHES'),
- dest="cover_branches",
- help="Include branch coverage in coverage report "
- "[NOSE_COVER_BRANCHES]")
- parser.add_option("--cover-xml", action="store_true",
- default=env.get('NOSE_COVER_XML'),
- dest="cover_xml",
- help="Produce XML coverage information")
- parser.add_option("--cover-xml-file", action="store",
- default=env.get('NOSE_COVER_XML_FILE', 'coverage.xml'),
- dest="cover_xml_file",
- metavar="FILE",
- help="Produce XML coverage information in file")
-
- def configure(self, options, conf):
- """
- Configure plugin.
- """
- try:
- self.status.pop('active')
- except KeyError:
- pass
- super(Coverage, self).configure(options, conf)
- if self.enabled:
- try:
- import coverage
- if not hasattr(coverage, 'coverage'):
- raise ImportError("Unable to import coverage module")
- except ImportError:
- log.error("Coverage not available: "
- "unable to import coverage module")
- self.enabled = False
- return
- self.conf = conf
- self.coverErase = options.cover_erase
- self.coverTests = options.cover_tests
- self.coverPackages = []
- if options.cover_packages:
- if isinstance(options.cover_packages, (list, tuple)):
- cover_packages = options.cover_packages
- else:
- cover_packages = [options.cover_packages]
- for pkgs in [tolist(x) for x in cover_packages]:
- self.coverPackages.extend(pkgs)
- self.coverInclusive = options.cover_inclusive
- if self.coverPackages:
- log.info("Coverage report will include only packages: %s",
- self.coverPackages)
- self.coverHtmlDir = None
- if options.cover_html:
- self.coverHtmlDir = options.cover_html_dir
- log.debug('Will put HTML coverage report in %s', self.coverHtmlDir)
- self.coverBranches = options.cover_branches
- self.coverXmlFile = None
- if options.cover_min_percentage:
- self.coverMinPercentage = int(options.cover_min_percentage.rstrip('%'))
- if options.cover_xml:
- self.coverXmlFile = options.cover_xml_file
- log.debug('Will put XML coverage report in %s', self.coverXmlFile)
- if self.enabled:
- self.status['active'] = True
- self.coverInstance = coverage.coverage(auto_data=False,
- branch=self.coverBranches, data_suffix=conf.worker,
- source=self.coverPackages)
- self.coverInstance._warn_no_data = False
- self.coverInstance.is_worker = conf.worker
- self.coverInstance.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
-
- log.debug("Coverage begin")
- self.skipModules = sys.modules.keys()[:]
- if self.coverErase:
- log.debug("Clearing previously collected coverage statistics")
- self.coverInstance.combine()
- self.coverInstance.erase()
-
- if not self.coverInstance.is_worker:
- self.coverInstance.load()
- self.coverInstance.start()
-
-
- def beforeTest(self, *args, **kwargs):
- """
- Begin recording coverage information.
- """
-
- if self.coverInstance.is_worker:
- self.coverInstance.load()
- self.coverInstance.start()
-
- def afterTest(self, *args, **kwargs):
- """
- Stop recording coverage information.
- """
-
- if self.coverInstance.is_worker:
- self.coverInstance.stop()
- self.coverInstance.save()
-
-
- def report(self, stream):
- """
- Output code coverage report.
- """
- log.debug("Coverage report")
- self.coverInstance.stop()
- self.coverInstance.combine()
- self.coverInstance.save()
- modules = [module
- for name, module in sys.modules.items()
- if self.wantModuleCoverage(name, module)]
- log.debug("Coverage report will cover modules: %s", modules)
- self.coverInstance.report(modules, file=stream)
-
- import coverage
- if self.coverHtmlDir:
- log.debug("Generating HTML coverage report")
- try:
- self.coverInstance.html_report(modules, self.coverHtmlDir)
- except coverage.misc.CoverageException, e:
- log.warning("Failed to generate HTML report: %s" % str(e))
-
- if self.coverXmlFile:
- log.debug("Generating XML coverage report")
- try:
- self.coverInstance.xml_report(modules, self.coverXmlFile)
- except coverage.misc.CoverageException, e:
- log.warning("Failed to generate XML report: %s" % str(e))
-
- # make sure we have minimum required coverage
- if self.coverMinPercentage:
- f = StringIO.StringIO()
- self.coverInstance.report(modules, file=f)
-
- multiPackageRe = (r'-------\s\w+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
- r'\s+(\d+)%\s+\d*\s{0,1}$')
- singlePackageRe = (r'-------\s[\w./]+\s+\d+\s+\d+(?:\s+\d+\s+\d+)?'
- r'\s+(\d+)%(?:\s+[-\d, ]+)\s{0,1}$')
-
- m = re.search(multiPackageRe, f.getvalue())
- if m is None:
- m = re.search(singlePackageRe, f.getvalue())
-
- if m:
- percentage = int(m.groups()[0])
- if percentage < self.coverMinPercentage:
- log.error('TOTAL Coverage did not reach minimum '
- 'required: %d%%' % self.coverMinPercentage)
- sys.exit(1)
- else:
- log.error("No total percentage was found in coverage output, "
- "something went wrong.")
-
-
- def wantModuleCoverage(self, name, module):
- if not hasattr(module, '__file__'):
- log.debug("no coverage of %s: no __file__", name)
- return False
- module_file = src(module.__file__)
- if not module_file or not module_file.endswith('.py'):
- log.debug("no coverage of %s: not a python file", name)
- return False
- if self.coverPackages:
- for package in self.coverPackages:
- if (re.findall(r'^%s\b' % re.escape(package), name)
- and (self.coverTests
- or not self.conf.testMatch.search(name))):
- log.debug("coverage for %s", name)
- return True
- if name in self.skipModules:
- log.debug("no coverage for %s: loaded before coverage start",
- name)
- return False
- if self.conf.testMatch.search(name) and not self.coverTests:
- log.debug("no coverage for %s: is a test", name)
- return False
- # accept any package that passed the previous tests, unless
- # coverPackages is on -- in that case, if we wanted this
- # module, we would have already returned True
- return not self.coverPackages
-
- def wantFile(self, file, package=None):
- """If inclusive coverage enabled, return true for all source files
- in wanted packages.
- """
- if self.coverInclusive:
- if file.endswith(".py"):
- if package and self.coverPackages:
- for want in self.coverPackages:
- if package.startswith(want):
- return True
- else:
- return True
- return None
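For orientation, the ``configure`` and ``report`` methods above are thin wrappers around the third-party ``coverage`` API. A minimal standalone sketch of that flow, assuming the ``coverage`` package is installed; the package name ``mypkg`` and the call ``mypkg.do_something()`` are hypothetical placeholders:

    import sys
    import coverage

    # mirrors Coverage.configure(): same constructor arguments and exclude pragma
    cov = coverage.coverage(auto_data=False, branch=True, source=['mypkg'])
    cov.exclude('#pragma[: ]+[nN][oO] [cC][oO][vV][eE][rR]')
    cov.start()

    import mypkg              # hypothetical package; imported after start() so it is measured
    mypkg.do_something()      # hypothetical code under test

    # mirrors Coverage.report(): stop, save, then emit text and HTML reports
    cov.stop()
    cov.save()
    cov.report(file=sys.stdout)
    cov.html_report(directory='cover')   # same default directory as --cover-html-dir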
diff --git a/lib/spack/external/nose/plugins/debug.py b/lib/spack/external/nose/plugins/debug.py
deleted file mode 100644
index 78243e60d0..0000000000
--- a/lib/spack/external/nose/plugins/debug.py
+++ /dev/null
@@ -1,67 +0,0 @@
-"""
-This plugin provides ``--pdb``, ``--pdb-failures``, and ``--pdb-errors``
-options. The ``--pdb`` option drops the test runner into pdb on both errors
-and failures; use ``--pdb-failures`` or ``--pdb-errors`` to restrict this to
-failures or errors, respectively.
-"""
-
-import pdb
-from nose.plugins.base import Plugin
-
-class Pdb(Plugin):
- """
- Provides --pdb and --pdb-failures options that cause the test runner to
- drop into pdb if it encounters an error or failure, respectively.
- """
- enabled_for_errors = False
- enabled_for_failures = False
- score = 5 # run last, among builtins
-
- def options(self, parser, env):
- """Register commandline options.
- """
- parser.add_option(
- "--pdb", action="store_true", dest="debugBoth",
- default=env.get('NOSE_PDB', False),
- help="Drop into debugger on failures or errors")
- parser.add_option(
- "--pdb-failures", action="store_true",
- dest="debugFailures",
- default=env.get('NOSE_PDB_FAILURES', False),
- help="Drop into debugger on failures")
- parser.add_option(
- "--pdb-errors", action="store_true",
- dest="debugErrors",
- default=env.get('NOSE_PDB_ERRORS', False),
- help="Drop into debugger on errors")
-
- def configure(self, options, conf):
- """Configure which kinds of exceptions trigger plugin.
- """
- self.conf = conf
- self.enabled_for_errors = options.debugErrors or options.debugBoth
- self.enabled_for_failures = options.debugFailures or options.debugBoth
- self.enabled = self.enabled_for_failures or self.enabled_for_errors
-
- def addError(self, test, err):
- """Enter pdb if configured to debug errors.
- """
- if not self.enabled_for_errors:
- return
- self.debug(err)
-
- def addFailure(self, test, err):
- """Enter pdb if configured to debug failures.
- """
- if not self.enabled_for_failures:
- return
- self.debug(err)
-
- def debug(self, err):
- import sys # FIXME why is this import here?
- ec, ev, tb = err
- stdout = sys.stdout
- sys.stdout = sys.__stdout__
- try:
- pdb.post_mortem(tb)
- finally:
- sys.stdout = stdout
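The ``debug()`` method above temporarily restores the real ``sys.stdout`` so the debugger prompt is visible even when output capture is active; the same pattern works outside nose. A small sketch (``broken()`` is a hypothetical call that raises):

    import pdb
    import sys

    def debug_last_error():
        # equivalent of Pdb.debug(): swap captured stdout for the real one,
        # run the post-mortem debugger, then restore whatever was there before
        _, _, tb = sys.exc_info()
        stdout = sys.stdout
        sys.stdout = sys.__stdout__
        try:
            pdb.post_mortem(tb)
        finally:
            sys.stdout = stdout

    try:
        broken()              # hypothetical failing call
    except Exception:
        debug_last_error()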
diff --git a/lib/spack/external/nose/plugins/deprecated.py b/lib/spack/external/nose/plugins/deprecated.py
deleted file mode 100644
index 461a26be63..0000000000
--- a/lib/spack/external/nose/plugins/deprecated.py
+++ /dev/null
@@ -1,45 +0,0 @@
-"""
-This plugin installs a DEPRECATED error class for the :class:`DeprecatedTest`
-exception. When :class:`DeprecatedTest` is raised, the exception will be logged
-in the deprecated attribute of the result, ``D`` or ``DEPRECATED`` (verbose)
-will be output, and the exception will not be counted as an error or failure.
-It is enabled by default, but can be turned off by using ``--no-deprecated``.
-"""
-
-from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
-
-
-class DeprecatedTest(Exception):
- """Raise this exception to mark a test as deprecated.
- """
- pass
-
-
-class Deprecated(ErrorClassPlugin):
- """
- Installs a DEPRECATED error class for the DeprecatedTest exception. Enabled
- by default.
- """
- enabled = True
- deprecated = ErrorClass(DeprecatedTest,
- label='DEPRECATED',
- isfailure=False)
-
- def options(self, parser, env):
- """Register commandline options.
- """
- env_opt = 'NOSE_WITHOUT_DEPRECATED'
- parser.add_option('--no-deprecated', action='store_true',
- dest='noDeprecated', default=env.get(env_opt, False),
- help="Disable special handling of DeprecatedTest "
- "exceptions.")
-
- def configure(self, options, conf):
- """Configure plugin.
- """
- if not self.can_configure:
- return
- self.conf = conf
- disable = getattr(options, 'noDeprecated', False)
- if disable:
- self.enabled = False
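Using the plugin from a test is a one-liner: raising the exception is enough for the error class machinery to report ``D``/``DEPRECATED`` instead of an error. A minimal sketch:

    import unittest
    from nose.plugins.deprecated import DeprecatedTest

    class TestLegacyApi(unittest.TestCase):
        def test_old_endpoint(self):
            # reported as DEPRECATED (not a failure) while the plugin is
            # enabled, which it is by default
            raise DeprecatedTest("old endpoint is scheduled for removal")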
diff --git a/lib/spack/external/nose/plugins/doctests.py b/lib/spack/external/nose/plugins/doctests.py
deleted file mode 100644
index 5ef65799f3..0000000000
--- a/lib/spack/external/nose/plugins/doctests.py
+++ /dev/null
@@ -1,455 +0,0 @@
-"""Use the Doctest plugin with ``--with-doctest`` or the NOSE_WITH_DOCTEST
-environment variable to enable collection and execution of :mod:`doctests
-<doctest>`. Because doctests are usually included in the tested package
-(instead of being grouped into packages or modules of their own), nose only
-looks for them in the non-test packages it discovers in the working directory.
-
-Doctests may also be placed into files other than python modules, in which
-case they can be collected and executed by using the ``--doctest-extension``
-switch or NOSE_DOCTEST_EXTENSION environment variable to indicate which file
-extension(s) to load.
-
-When loading doctests from non-module files, use the ``--doctest-fixtures``
-switch to specify how to find modules containing fixtures for the tests. A
-module name will be produced by appending the value of that switch to the base
-name of each doctest file loaded. For example, a doctest file "widgets.rst"
-with the switch ``--doctest_fixtures=_fixt`` will load fixtures from the module
-``widgets_fixt.py``.
-
-A fixtures module may define any or all of the following functions:
-
-* setup([module]) or setup_module([module])
-
- Called before the test runs. You may raise SkipTest to skip all tests.
-
-* teardown([module]) or teardown_module([module])
-
- Called after the test runs, if setup/setup_module did not raise an
- unhandled exception.
-
-* setup_test(test)
-
- Called before the test. NOTE: the argument passed is a
- doctest.DocTest instance, *not* a unittest.TestCase.
-
-* teardown_test(test)
-
- Called after the test, if setup_test did not raise an exception. NOTE: the
- argument passed is a doctest.DocTest instance, *not* a unittest.TestCase.
-
-Doctests are run like any other test, with the exception that output
-capture does not work; doctest does its own output capture while running a
-test.
-
-.. note ::
-
- See :doc:`../doc_tests/test_doctest_fixtures/doctest_fixtures` for
- additional documentation and examples.
-
-"""
-from __future__ import generators
-
-import logging
-import os
-import sys
-import unittest
-from inspect import getmodule
-from nose.plugins.base import Plugin
-from nose.suite import ContextList
-from nose.util import anyp, getpackage, test_address, resolve_name, \
- src, tolist, isproperty
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
-import sys
-import __builtin__ as builtin_mod
-
-log = logging.getLogger(__name__)
-
-try:
- import doctest
- doctest.DocTestCase
- # system version of doctest is acceptable, but needs a monkeypatch
-except (ImportError, AttributeError):
- # system version is too old
- import nose.ext.dtcompat as doctest
-
-
-#
-# Doctest and coverage don't get along, so we need to create
-# a monkeypatch that will replace the part of doctest that
-# interferes with coverage reports.
-#
-# The monkeypatch is based on this zope patch:
-# http://svn.zope.org/Zope3/trunk/src/zope/testing/doctest.py?rev=28679&r1=28703&r2=28705
-#
-_orp = doctest._OutputRedirectingPdb
-
-class NoseOutputRedirectingPdb(_orp):
- def __init__(self, out):
- self.__debugger_used = False
- _orp.__init__(self, out)
-
- def set_trace(self):
- self.__debugger_used = True
- _orp.set_trace(self, sys._getframe().f_back)
-
- def set_continue(self):
- # Calling set_continue unconditionally would break unit test
- # coverage reporting, as Bdb.set_continue calls sys.settrace(None).
- if self.__debugger_used:
- _orp.set_continue(self)
-doctest._OutputRedirectingPdb = NoseOutputRedirectingPdb
-
-
-class DoctestSuite(unittest.TestSuite):
- """
- Doctest suites are parallelizable at the module or file level only,
- since they may be attached to objects that are not individually
- addressable (like properties). This suite subclass is used when
- loading doctests from a module to ensure that behavior.
-
- This class is used only if the plugin is not fully prepared;
- in normal use, the loader's suiteClass is used.
-
- """
- can_split = False
-
- def __init__(self, tests=(), context=None, can_split=False):
- self.context = context
- self.can_split = can_split
- unittest.TestSuite.__init__(self, tests=tests)
-
- def address(self):
- return test_address(self.context)
-
- def __iter__(self):
- # 2.3 compat
- return iter(self._tests)
-
- def __str__(self):
- return str(self._tests)
-
-
-class Doctest(Plugin):
- """
- Activate doctest plugin to find and run doctests in non-test modules.
- """
- extension = None
- suiteClass = DoctestSuite
-
- def options(self, parser, env):
- """Register commmandline options.
- """
- Plugin.options(self, parser, env)
- parser.add_option('--doctest-tests', action='store_true',
- dest='doctest_tests',
- default=env.get('NOSE_DOCTEST_TESTS'),
- help="Also look for doctests in test modules. "
- "Note that classes, methods and functions should "
- "have either doctests or non-doctest tests, "
- "not both. [NOSE_DOCTEST_TESTS]")
- parser.add_option('--doctest-extension', action="append",
- dest="doctestExtension",
- metavar="EXT",
- help="Also look for doctests in files with "
- "this extension [NOSE_DOCTEST_EXTENSION]")
- parser.add_option('--doctest-result-variable',
- dest='doctest_result_var',
- default=env.get('NOSE_DOCTEST_RESULT_VAR'),
- metavar="VAR",
- help="Change the variable name set to the result of "
- "the last interpreter command from the default '_'. "
- "Can be used to avoid conflicts with the _() "
- "function used for text translation. "
- "[NOSE_DOCTEST_RESULT_VAR]")
- parser.add_option('--doctest-fixtures', action="store",
- dest="doctestFixtures",
- metavar="SUFFIX",
- help="Find fixtures for a doctest file in module "
- "with this name appended to the base name "
- "of the doctest file")
- parser.add_option('--doctest-options', action="append",
- dest="doctestOptions",
- metavar="OPTIONS",
- help="Specify options to pass to doctest. " +
- "Eg. '+ELLIPSIS,+NORMALIZE_WHITESPACE'")
- # Set the default as a list, if given in env; otherwise
- # an additional value set on the command line will cause
- # an error.
- env_setting = env.get('NOSE_DOCTEST_EXTENSION')
- if env_setting is not None:
- parser.set_defaults(doctestExtension=tolist(env_setting))
-
- def configure(self, options, config):
- """Configure plugin.
- """
- Plugin.configure(self, options, config)
- self.doctest_result_var = options.doctest_result_var
- self.doctest_tests = options.doctest_tests
- self.extension = tolist(options.doctestExtension)
- self.fixtures = options.doctestFixtures
- self.finder = doctest.DocTestFinder()
- self.optionflags = 0
- if options.doctestOptions:
- flags = ",".join(options.doctestOptions).split(',')
- for flag in flags:
- if not flag or flag[0] not in '+-':
- raise ValueError(
- "Must specify doctest options with starting " +
- "'+' or '-'. Got %s" % (flag,))
- mode, option_name = flag[0], flag[1:]
- option_flag = doctest.OPTIONFLAGS_BY_NAME.get(option_name)
- if not option_flag:
- raise ValueError("Unknown doctest option %s" %
- (option_name,))
- if mode == '+':
- self.optionflags |= option_flag
- elif mode == '-':
- self.optionflags &= ~option_flag
-
- def prepareTestLoader(self, loader):
- """Capture loader's suiteClass.
-
- This is used to create test suites from doctest files.
-
- """
- self.suiteClass = loader.suiteClass
-
- def loadTestsFromModule(self, module):
- """Load doctests from the module.
- """
- log.debug("loading from %s", module)
- if not self.matches(module.__name__):
- log.debug("Doctest doesn't want module %s", module)
- return
- try:
- tests = self.finder.find(module)
- except AttributeError:
- log.exception("Attribute error loading from %s", module)
- # nose allows module.__test__ = False; doctest does not and throws
- # AttributeError
- return
- if not tests:
- log.debug("No tests found in %s", module)
- return
- tests.sort()
- module_file = src(module.__file__)
- # FIXME this breaks the id plugin somehow (tests probably don't
- # get wrapped in result proxy or something)
- cases = []
- for test in tests:
- if not test.examples:
- continue
- if not test.filename:
- test.filename = module_file
- cases.append(DocTestCase(test,
- optionflags=self.optionflags,
- result_var=self.doctest_result_var))
- if cases:
- yield self.suiteClass(cases, context=module, can_split=False)
-
- def loadTestsFromFile(self, filename):
- """Load doctests from the file.
-
- Tests are loaded only if filename's extension matches
- configured doctest extension.
-
- """
- if self.extension and anyp(filename.endswith, self.extension):
- name = os.path.basename(filename)
- dh = open(filename)
- try:
- doc = dh.read()
- finally:
- dh.close()
-
- fixture_context = None
- globs = {'__file__': filename}
- if self.fixtures:
- base, ext = os.path.splitext(name)
- dirname = os.path.dirname(filename)
- sys.path.append(dirname)
- fixt_mod = base + self.fixtures
- try:
- fixture_context = __import__(
- fixt_mod, globals(), locals(), ["nop"])
- except ImportError, e:
- log.debug(
- "Could not import %s: %s (%s)", fixt_mod, e, sys.path)
- log.debug("Fixture module %s resolved to %s",
- fixt_mod, fixture_context)
- if hasattr(fixture_context, 'globs'):
- globs = fixture_context.globs(globs)
- parser = doctest.DocTestParser()
- test = parser.get_doctest(
- doc, globs=globs, name=name,
- filename=filename, lineno=0)
- if test.examples:
- case = DocFileCase(
- test,
- optionflags=self.optionflags,
- setUp=getattr(fixture_context, 'setup_test', None),
- tearDown=getattr(fixture_context, 'teardown_test', None),
- result_var=self.doctest_result_var)
- if fixture_context:
- yield ContextList((case,), context=fixture_context)
- else:
- yield case
- else:
- yield False # no tests to load
-
- def makeTest(self, obj, parent):
- """Look for doctests in the given object, which will be a
- function, method or class.
- """
- name = getattr(obj, '__name__', 'Unnamed %s' % type(obj))
- doctests = self.finder.find(obj, module=getmodule(parent), name=name)
- if doctests:
- for test in doctests:
- if len(test.examples) == 0:
- continue
- yield DocTestCase(test, obj=obj, optionflags=self.optionflags,
- result_var=self.doctest_result_var)
-
- def matches(self, name):
- # FIXME this seems wrong -- nothing is ever going to
- # fail this test, since we're given a module NAME not FILE
- if name == '__init__.py':
- return False
- # FIXME don't think we need include/exclude checks here?
- return ((self.doctest_tests or not self.conf.testMatch.search(name)
- or (self.conf.include
- and filter(None,
- [inc.search(name)
- for inc in self.conf.include])))
- and (not self.conf.exclude
- or not filter(None,
- [exc.search(name)
- for exc in self.conf.exclude])))
-
- def wantFile(self, file):
- """Override to select all modules and any file ending with
- configured doctest extension.
- """
- # always want .py files
- if file.endswith('.py'):
- return True
- # also want files that match my extension
- if (self.extension
- and anyp(file.endswith, self.extension)
- and (not self.conf.exclude
- or not filter(None,
- [exc.search(file)
- for exc in self.conf.exclude]))):
- return True
- return None
-
-
-class DocTestCase(doctest.DocTestCase):
- """Overrides DocTestCase to
- provide an address() method that returns the correct address for
- the doctest case. To provide hints for address(), an obj may also
- be passed -- this will be used as the test object for purposes of
- determining the test address, if it is provided.
- """
- def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
- checker=None, obj=None, result_var='_'):
- self._result_var = result_var
- self._nose_obj = obj
- super(DocTestCase, self).__init__(
- test, optionflags=optionflags, setUp=setUp, tearDown=tearDown,
- checker=checker)
-
- def address(self):
- if self._nose_obj is not None:
- return test_address(self._nose_obj)
- obj = resolve_name(self._dt_test.name)
-
- if isproperty(obj):
- # properties have no connection to the class they are in
- # so we can't just look 'em up, we have to first look up
- # the class, then stick the prop on the end
- parts = self._dt_test.name.split('.')
- class_name = '.'.join(parts[:-1])
- cls = resolve_name(class_name)
- base_addr = test_address(cls)
- return (base_addr[0], base_addr[1],
- '.'.join([base_addr[2], parts[-1]]))
- else:
- return test_address(obj)
-
- # doctests loaded via find(obj) omit the module name
- # so we need to override id, __repr__ and shortDescription
- # bonus: this will squash a 2.3 vs 2.4 incompatibility
- def id(self):
- name = self._dt_test.name
- filename = self._dt_test.filename
- if filename is not None:
- pk = getpackage(filename)
- if pk is None:
- return name
- if not name.startswith(pk):
- name = "%s.%s" % (pk, name)
- return name
-
- def __repr__(self):
- name = self.id()
- name = name.split('.')
- return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
- __str__ = __repr__
-
- def shortDescription(self):
- return 'Doctest: %s' % self.id()
-
- def setUp(self):
- if self._result_var is not None:
- self._old_displayhook = sys.displayhook
- sys.displayhook = self._displayhook
- super(DocTestCase, self).setUp()
-
- def _displayhook(self, value):
- if value is None:
- return
- setattr(builtin_mod, self._result_var, value)
- print repr(value)
-
- def tearDown(self):
- super(DocTestCase, self).tearDown()
- if self._result_var is not None:
- sys.displayhook = self._old_displayhook
- delattr(builtin_mod, self._result_var)
-
-
-class DocFileCase(doctest.DocFileCase):
- """Overrides to provide address() method that returns the correct
- address for the doc file case.
- """
- def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
- checker=None, result_var='_'):
- self._result_var = result_var
- super(DocFileCase, self).__init__(
- test, optionflags=optionflags, setUp=setUp, tearDown=tearDown,
- checker=None)
-
- def address(self):
- return (self._dt_test.filename, None, None)
-
- def setUp(self):
- if self._result_var is not None:
- self._old_displayhook = sys.displayhook
- sys.displayhook = self._displayhook
- super(DocFileCase, self).setUp()
-
- def _displayhook(self, value):
- if value is None:
- return
- setattr(builtin_mod, self._result_var, value)
- print repr(value)
-
- def tearDown(self):
- super(DocFileCase, self).tearDown()
- if self._result_var is not None:
- sys.displayhook = self._old_displayhook
- delattr(builtin_mod, self._result_var)
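The fixture lookup described in the module docstring (``widgets.rst`` plus ``--doctest-fixtures=_fixt`` resolving to ``widgets_fixt.py``) expects a plain module defining any of the four hooks. A sketch of such a module, reusing the docstring's file names; the state stored on the module is illustrative only:

    # widgets_fixt.py -- fixtures for widgets.rst, selected via
    #   --with-doctest --doctest-extension=rst --doctest-fixtures=_fixt

    def setup_module(module):
        # runs once before the doctests in widgets.rst
        module.tmp_state = {'connected': True}

    def teardown_module(module):
        # runs once afterwards, only if setup_module did not raise
        module.tmp_state.clear()

    def setup_test(test):
        # `test` is a doctest.DocTest instance, not a unittest.TestCase
        test.globs['answer'] = 42

    def teardown_test(test):
        test.globs.pop('answer', None)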
diff --git a/lib/spack/external/nose/plugins/errorclass.py b/lib/spack/external/nose/plugins/errorclass.py
deleted file mode 100644
index d1540e0070..0000000000
--- a/lib/spack/external/nose/plugins/errorclass.py
+++ /dev/null
@@ -1,210 +0,0 @@
-"""
-ErrorClass Plugins
-------------------
-
-ErrorClass plugins provide an easy way to add support for custom
-handling of particular classes of exceptions.
-
-An ErrorClass plugin defines one or more ErrorClasses and how each is
-handled and reported on. Each error class is stored in a different
-attribute on the result, and reported separately. Each error class must
-indicate the exceptions that fall under that class, the label to use
-for reporting, and whether exceptions of the class should be
-considered as failures for the whole test run.
-
-ErrorClasses use a declarative syntax. Assign an ErrorClass to the
-attribute you wish to add to the result object, defining the
-exceptions, label and isfailure attributes. For example, to declare an
-ErrorClassPlugin that defines TodoErrors (and subclasses of TodoError)
-as an error class with the label 'TODO' that is considered a failure,
-do this:
-
- >>> class Todo(Exception):
- ... pass
- >>> class TodoError(ErrorClassPlugin):
- ... todo = ErrorClass(Todo, label='TODO', isfailure=True)
-
-The MetaErrorClass metaclass translates the ErrorClass declarations
-into the tuples used by the error handling and reporting functions in
-the result. This is an internal format and subject to change; you
-should always use the declarative syntax for attaching ErrorClasses to
-an ErrorClass plugin.
-
- >>> TodoError.errorClasses # doctest: +ELLIPSIS
- ((<class ...Todo...>, ('todo', 'TODO', True)),)
-
-Let's see the plugin in action. First some boilerplate.
-
- >>> import sys
- >>> import unittest
- >>> try:
- ... # 2.7+
- ... from unittest.runner import _WritelnDecorator
- ... except ImportError:
- ... from unittest import _WritelnDecorator
- ...
- >>> buf = _WritelnDecorator(sys.stdout)
-
-Now define a test case that raises a Todo.
-
- >>> class TestTodo(unittest.TestCase):
- ... def runTest(self):
- ... raise Todo("I need to test something")
- >>> case = TestTodo()
-
-Prepare the result using our plugin. Normally this happens during the
-course of test execution within nose -- you won't be doing this
-yourself. For the purposes of this testing document, I'm stepping
-through the internal process of nose so you can see what happens at
-each step.
-
- >>> plugin = TodoError()
- >>> from nose.result import _TextTestResult
- >>> result = _TextTestResult(stream=buf, descriptions=0, verbosity=2)
- >>> plugin.prepareTestResult(result)
-
-Now run the test. TODO is printed.
-
- >>> _ = case(result) # doctest: +ELLIPSIS
- runTest (....TestTodo) ... TODO: I need to test something
-
-Errors and failures are empty, but todo has our test:
-
- >>> result.errors
- []
- >>> result.failures
- []
- >>> result.todo # doctest: +ELLIPSIS
- [(<....TestTodo testMethod=runTest>, '...Todo: I need to test something\\n')]
- >>> result.printErrors() # doctest: +ELLIPSIS
- <BLANKLINE>
- ======================================================================
- TODO: runTest (....TestTodo)
- ----------------------------------------------------------------------
- Traceback (most recent call last):
- ...
- ...Todo: I need to test something
- <BLANKLINE>
-
-Since we defined a Todo as a failure, the run was not successful.
-
- >>> result.wasSuccessful()
- False
-"""
-
-from nose.pyversion import make_instancemethod
-from nose.plugins.base import Plugin
-from nose.result import TextTestResult
-from nose.util import isclass
-
-class MetaErrorClass(type):
- """Metaclass for ErrorClassPlugins that allows error classes to be
- set up in a declarative manner.
- """
- def __init__(self, name, bases, attr):
- errorClasses = []
- for name, detail in attr.items():
- if isinstance(detail, ErrorClass):
- attr.pop(name)
- for cls in detail:
- errorClasses.append(
- (cls, (name, detail.label, detail.isfailure)))
- super(MetaErrorClass, self).__init__(name, bases, attr)
- self.errorClasses = tuple(errorClasses)
-
-
-class ErrorClass(object):
- def __init__(self, *errorClasses, **kw):
- self.errorClasses = errorClasses
- try:
- for key in ('label', 'isfailure'):
- setattr(self, key, kw.pop(key))
- except KeyError:
- raise TypeError("%r is a required named argument for ErrorClass"
- % key)
-
- def __iter__(self):
- return iter(self.errorClasses)
-
-
-class ErrorClassPlugin(Plugin):
- """
- Base class for ErrorClass plugins. Subclass this class and declare the
- exceptions that you wish to handle as attributes of the subclass.
- """
- __metaclass__ = MetaErrorClass
- score = 1000
- errorClasses = ()
-
- def addError(self, test, err):
- err_cls, a, b = err
- if not isclass(err_cls):
- return
- classes = [e[0] for e in self.errorClasses]
- if filter(lambda c: issubclass(err_cls, c), classes):
- return True
-
- def prepareTestResult(self, result):
- if not hasattr(result, 'errorClasses'):
- self.patchResult(result)
- for cls, (storage_attr, label, isfail) in self.errorClasses:
- if cls not in result.errorClasses:
- storage = getattr(result, storage_attr, [])
- setattr(result, storage_attr, storage)
- result.errorClasses[cls] = (storage, label, isfail)
-
- def patchResult(self, result):
- result.printLabel = print_label_patch(result)
- result._orig_addError, result.addError = \
- result.addError, add_error_patch(result)
- result._orig_wasSuccessful, result.wasSuccessful = \
- result.wasSuccessful, wassuccessful_patch(result)
- if hasattr(result, 'printErrors'):
- result._orig_printErrors, result.printErrors = \
- result.printErrors, print_errors_patch(result)
- if hasattr(result, 'addSkip'):
- result._orig_addSkip, result.addSkip = \
- result.addSkip, add_skip_patch(result)
- result.errorClasses = {}
-
-
-def add_error_patch(result):
- """Create a new addError method to patch into a result instance
- that recognizes the errorClasses attribute and deals with
- errorclasses correctly.
- """
- return make_instancemethod(TextTestResult.addError, result)
-
-
-def print_errors_patch(result):
- """Create a new printErrors method that prints errorClasses items
- as well.
- """
- return make_instancemethod(TextTestResult.printErrors, result)
-
-
-def print_label_patch(result):
- """Create a new printLabel method that prints errorClasses items
- as well.
- """
- return make_instancemethod(TextTestResult.printLabel, result)
-
-
-def wassuccessful_patch(result):
- """Create a new wasSuccessful method that checks errorClasses for
- exceptions that were put into other slots than error or failure
- but that still count as not success.
- """
- return make_instancemethod(TextTestResult.wasSuccessful, result)
-
-
-def add_skip_patch(result):
- """Create a new addSkip method to patch into a result instance
- that delegates to addError.
- """
- return make_instancemethod(TextTestResult.addSkip, result)
-
-
-if __name__ == '__main__':
- import doctest
- doctest.testmod()
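Beyond the TODO example worked through in the docstring, the same declarative pattern fits any outcome that should be reported in its own column. A short sketch of a non-failing "known issue" class (registering the plugin with nose, for example via the ``addplugins`` mechanism mentioned in the plugin-manager docs, is left out):

    from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin

    class KnownIssue(Exception):
        """Raise inside a test to record a known, tolerated problem."""

    class KnownIssuePlugin(ErrorClassPlugin):
        enabled = True
        # reported under the KNOWN label and not counted as a failure
        known = ErrorClass(KnownIssue, label='KNOWN', isfailure=False)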
diff --git a/lib/spack/external/nose/plugins/failuredetail.py b/lib/spack/external/nose/plugins/failuredetail.py
deleted file mode 100644
index 6462865dd0..0000000000
--- a/lib/spack/external/nose/plugins/failuredetail.py
+++ /dev/null
@@ -1,49 +0,0 @@
-"""
-This plugin provides assert introspection. When the plugin is enabled
-and a test failure occurs, the traceback is displayed with extra context
-around the line in which the exception was raised. Simple variable
-substitution is also performed in the context output to provide more
-debugging information.
-"""
-
-from nose.plugins import Plugin
-from nose.pyversion import exc_to_unicode, force_unicode
-from nose.inspector import inspect_traceback
-
-class FailureDetail(Plugin):
- """
- Plugin that provides extra information in tracebacks of test failures.
- """
- score = 1600 # before capture
-
- def options(self, parser, env):
- """Register commmandline options.
- """
- parser.add_option(
- "-d", "--detailed-errors", "--failure-detail",
- action="store_true",
- default=env.get('NOSE_DETAILED_ERRORS'),
- dest="detailedErrors", help="Add detail to error"
- " output by attempting to evaluate failed"
- " asserts [NOSE_DETAILED_ERRORS]")
-
- def configure(self, options, conf):
- """Configure plugin.
- """
- if not self.can_configure:
- return
- self.enabled = options.detailedErrors
- self.conf = conf
-
- def formatFailure(self, test, err):
- """Add detail from traceback inspection to error message of a failure.
- """
- ec, ev, tb = err
- tbinfo, str_ev = None, exc_to_unicode(ev)
-
- if tb:
- tbinfo = force_unicode(inspect_traceback(tb))
- str_ev = '\n'.join([str_ev, tbinfo])
- test.tbinfo = tbinfo
- return (ec, str_ev, tb)
-
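The plugin only rewrites the error tuple handed to it, so enabling it is a single flag. A sketch of turning it on programmatically, assuming ``nose.run`` accepts an ``argv`` list as in the standard distribution; ``test_widgets`` is a hypothetical test module name:

    import nose

    # -d / --detailed-errors enables FailureDetail (see options() above)
    nose.run(argv=['nosetests', '--detailed-errors', 'test_widgets'])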
diff --git a/lib/spack/external/nose/plugins/isolate.py b/lib/spack/external/nose/plugins/isolate.py
deleted file mode 100644
index 13235dfbd1..0000000000
--- a/lib/spack/external/nose/plugins/isolate.py
+++ /dev/null
@@ -1,103 +0,0 @@
-"""The isolation plugin resets the contents of sys.modules after running
-each test module or package. Use it by setting ``--with-isolation`` or the
-NOSE_WITH_ISOLATION environment variable.
-
-The effects are similar to wrapping the following functions around the
-import and execution of each test module::
-
- def setup(module):
- module._mods = sys.modules.copy()
-
- def teardown(module):
- to_del = [ m for m in sys.modules.keys() if m not in
- module._mods ]
- for mod in to_del:
- del sys.modules[mod]
- sys.modules.update(module._mods)
-
-Isolation works only during lazy loading. In normal use, this is only
-during discovery of modules within a directory, where the process of
-importing, loading tests and running tests from each module is
-encapsulated in a single loadTestsFromName call. This plugin
-implements loadTestsFromNames to force the same lazy-loading there,
-which allows isolation to work in directed mode as well as discovery,
-at the cost of some efficiency: lazy-loading names forces full context
-setup and teardown to run for each name, defeating the grouping that
-is normally used to ensure that context setup and teardown are run the
-fewest possible times for a given set of names.
-
-.. warning ::
-
- This plugin should not be used in conjunction with other plugins
- that assume that modules, once imported, will stay imported; for
- instance, it may cause very odd results when used with the coverage
- plugin.
-
-"""
-
-import logging
-import sys
-
-from nose.plugins import Plugin
-
-
-log = logging.getLogger('nose.plugins.isolation')
-
-class IsolationPlugin(Plugin):
- """
- Activate the isolation plugin to isolate changes to external
- modules to a single test module or package. The isolation plugin
- resets the contents of sys.modules after each test module or
- package runs to its state before the test. PLEASE NOTE that this
- plugin should not be used with the coverage plugin, or in any other case
- where module reloading may produce undesirable side-effects.
- """
- score = 10 # I want to be last
- name = 'isolation'
-
- def configure(self, options, conf):
- """Configure plugin.
- """
- Plugin.configure(self, options, conf)
- self._mod_stack = []
-
- def beforeContext(self):
- """Copy sys.modules onto my mod stack
- """
- mods = sys.modules.copy()
- self._mod_stack.append(mods)
-
- def afterContext(self):
- """Pop my mod stack and restore sys.modules to the state
- it was in when mod stack was pushed.
- """
- mods = self._mod_stack.pop()
- to_del = [ m for m in sys.modules.keys() if m not in mods ]
- if to_del:
- log.debug('removing sys modules entries: %s', to_del)
- for mod in to_del:
- del sys.modules[mod]
- sys.modules.update(mods)
-
- def loadTestsFromNames(self, names, module=None):
- """Create a lazy suite that calls beforeContext and afterContext
- around each name. The side-effect of this is that full context
- fixtures will be set up and torn down around each test named.
- """
- # Fast path for when we don't care
- if not names or len(names) == 1:
- return
- loader = self.loader
- plugins = self.conf.plugins
- def lazy():
- for name in names:
- plugins.beforeContext()
- yield loader.loadTestsFromName(name, module=module)
- plugins.afterContext()
- return (loader.suiteClass(lazy), [])
-
- def prepareTestLoader(self, loader):
- """Get handle on test loader so we can use it in loadTestsFromNames.
- """
- self.loader = loader
-
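The bookkeeping in ``beforeContext``/``afterContext`` is plain ``sys.modules`` snapshot-and-restore and can be reproduced outside nose; a standalone sketch as a context manager:

    import sys
    from contextlib import contextmanager

    @contextmanager
    def isolated_modules():
        # same idea as beforeContext()/afterContext() above
        saved = sys.modules.copy()
        try:
            yield
        finally:
            for name in [m for m in sys.modules if m not in saved]:
                del sys.modules[name]
            sys.modules.update(saved)

    # anything imported inside the block that was not already loaded
    # is forgotten again afterwards
    with isolated_modules():
        import json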
diff --git a/lib/spack/external/nose/plugins/logcapture.py b/lib/spack/external/nose/plugins/logcapture.py
deleted file mode 100644
index 4c9a79f6fd..0000000000
--- a/lib/spack/external/nose/plugins/logcapture.py
+++ /dev/null
@@ -1,245 +0,0 @@
-"""
-This plugin captures logging statements issued during test execution. When an
-error or failure occurs, the captured log messages are attached to the running
-test in the test.capturedLogging attribute, and displayed with the error or
-failure output. It is enabled by default but can be turned off with the option
-``--nologcapture``.
-
-You can filter captured logging statements with the ``--logging-filter`` option.
-If set, it specifies which logger(s) will be captured; loggers that do not match
-are passed through without being captured. Example: specifying
-``--logging-filter=sqlalchemy,myapp`` will ensure that only statements logged
-via the sqlalchemy.engine, myapp, or myapp.foo.bar loggers are captured.
-
-You can remove other installed logging handlers with the
-``--logging-clear-handlers`` option.
-"""
-
-import logging
-from logging import Handler
-import threading
-
-from nose.plugins.base import Plugin
-from nose.util import anyp, ln, safe_str
-
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
-
-log = logging.getLogger(__name__)
-
-class FilterSet(object):
- def __init__(self, filter_components):
- self.inclusive, self.exclusive = self._partition(filter_components)
-
- # @staticmethod
- def _partition(components):
- inclusive, exclusive = [], []
- for component in components:
- if component.startswith('-'):
- exclusive.append(component[1:])
- else:
- inclusive.append(component)
- return inclusive, exclusive
- _partition = staticmethod(_partition)
-
- def allow(self, record):
- """returns whether this record should be printed"""
- if not self:
- # nothing to filter
- return True
- return self._allow(record) and not self._deny(record)
-
- # @staticmethod
- def _any_match(matchers, record):
- """return the bool of whether `record` starts with
- any item in `matchers`"""
- def record_matches_key(key):
- return record == key or record.startswith(key + '.')
- return anyp(bool, map(record_matches_key, matchers))
- _any_match = staticmethod(_any_match)
-
- def _allow(self, record):
- if not self.inclusive:
- return True
- return self._any_match(self.inclusive, record)
-
- def _deny(self, record):
- if not self.exclusive:
- return False
- return self._any_match(self.exclusive, record)
-
-
-class MyMemoryHandler(Handler):
- def __init__(self, logformat, logdatefmt, filters):
- Handler.__init__(self)
- fmt = logging.Formatter(logformat, logdatefmt)
- self.setFormatter(fmt)
- self.filterset = FilterSet(filters)
- self.buffer = []
- def emit(self, record):
- self.buffer.append(self.format(record))
- def flush(self):
- pass # do nothing
- def truncate(self):
- self.buffer = []
- def filter(self, record):
- if self.filterset.allow(record.name):
- return Handler.filter(self, record)
- def __getstate__(self):
- state = self.__dict__.copy()
- del state['lock']
- return state
- def __setstate__(self, state):
- self.__dict__.update(state)
- self.lock = threading.RLock()
-
-
-class LogCapture(Plugin):
- """
- Log capture plugin. Enabled by default. Disable with --nologcapture.
- This plugin captures logging statements issued during test execution,
- appending any output captured to the error or failure output,
- should the test fail or raise an error.
- """
- enabled = True
- env_opt = 'NOSE_NOLOGCAPTURE'
- name = 'logcapture'
- score = 500
- logformat = '%(name)s: %(levelname)s: %(message)s'
- logdatefmt = None
- clear = False
- filters = ['-nose']
-
- def options(self, parser, env):
- """Register commandline options.
- """
- parser.add_option(
- "--nologcapture", action="store_false",
- default=not env.get(self.env_opt), dest="logcapture",
- help="Disable logging capture plugin. "
- "Logging configuration will be left intact."
- " [NOSE_NOLOGCAPTURE]")
- parser.add_option(
- "--logging-format", action="store", dest="logcapture_format",
- default=env.get('NOSE_LOGFORMAT') or self.logformat,
- metavar="FORMAT",
- help="Specify custom format to print statements. "
- "Uses the same format as used by standard logging handlers."
- " [NOSE_LOGFORMAT]")
- parser.add_option(
- "--logging-datefmt", action="store", dest="logcapture_datefmt",
- default=env.get('NOSE_LOGDATEFMT') or self.logdatefmt,
- metavar="FORMAT",
- help="Specify custom date/time format to print statements. "
- "Uses the same format as used by standard logging handlers."
- " [NOSE_LOGDATEFMT]")
- parser.add_option(
- "--logging-filter", action="store", dest="logcapture_filters",
- default=env.get('NOSE_LOGFILTER'),
- metavar="FILTER",
- help="Specify which statements to filter in/out. "
- "By default, everything is captured. If the output is too"
- " verbose,\nuse this option to filter out needless output.\n"
- "Example: filter=foo will capture statements issued ONLY to\n"
- " foo or foo.what.ever.sub but not foobar or other logger.\n"
- "Specify multiple loggers with comma: filter=foo,bar,baz.\n"
- "If any logger name is prefixed with a minus, eg filter=-foo,\n"
- "it will be excluded rather than included. Default: "
- "exclude logging messages from nose itself (-nose)."
- " [NOSE_LOGFILTER]\n")
- parser.add_option(
- "--logging-clear-handlers", action="store_true",
- default=False, dest="logcapture_clear",
- help="Clear all other logging handlers")
- parser.add_option(
- "--logging-level", action="store",
- default='NOTSET', dest="logcapture_level",
- help="Set the log level to capture")
-
- def configure(self, options, conf):
- """Configure plugin.
- """
- self.conf = conf
- # Disable if explicitly disabled, or if logging is
- # configured via logging config file
- if not options.logcapture or conf.loggingConfig:
- self.enabled = False
- self.logformat = options.logcapture_format
- self.logdatefmt = options.logcapture_datefmt
- self.clear = options.logcapture_clear
- self.loglevel = options.logcapture_level
- if options.logcapture_filters:
- self.filters = options.logcapture_filters.split(',')
-
- def setupLoghandler(self):
- # setup our handler with root logger
- root_logger = logging.getLogger()
- if self.clear:
- if hasattr(root_logger, "handlers"):
- for handler in root_logger.handlers:
- root_logger.removeHandler(handler)
- for logger in logging.Logger.manager.loggerDict.values():
- if hasattr(logger, "handlers"):
- for handler in logger.handlers:
- logger.removeHandler(handler)
- # make sure there isn't one already
- # you can't simply use "if self.handler not in root_logger.handlers"
- # since at least in unit tests this doesn't work --
- # LogCapture() is instantiated for each test case while root_logger
- # is module global
- # so we always add new MyMemoryHandler instance
- for handler in root_logger.handlers[:]:
- if isinstance(handler, MyMemoryHandler):
- root_logger.handlers.remove(handler)
- root_logger.addHandler(self.handler)
- # to make sure everything gets captured
- loglevel = getattr(self, "loglevel", "NOTSET")
- root_logger.setLevel(getattr(logging, loglevel))
-
- def begin(self):
- """Set up logging handler before test run begins.
- """
- self.start()
-
- def start(self):
- self.handler = MyMemoryHandler(self.logformat, self.logdatefmt,
- self.filters)
- self.setupLoghandler()
-
- def end(self):
- pass
-
- def beforeTest(self, test):
- """Clear buffers and handlers before test.
- """
- self.setupLoghandler()
-
- def afterTest(self, test):
- """Clear buffers after test.
- """
- self.handler.truncate()
-
- def formatFailure(self, test, err):
- """Add captured log messages to failure output.
- """
- return self.formatError(test, err)
-
- def formatError(self, test, err):
- """Add captured log messages to error output.
- """
- # logic flow copied from Capture.formatError
- test.capturedLogging = records = self.formatLogRecords()
- if not records:
- return err
- ec, ev, tb = err
- return (ec, self.addCaptureToErr(ev, records), tb)
-
- def formatLogRecords(self):
- return map(safe_str, self.handler.buffer)
-
- def addCaptureToErr(self, ev, records):
- return '\n'.join([safe_str(ev), ln('>> begin captured logging <<')] + \
- records + \
- [ln('>> end captured logging <<')])
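The ``--logging-filter`` semantics described in the docstring are implemented by ``FilterSet`` and can be exercised directly; a brief sketch using the docstring's own logger names plus one hypothetical exclusion:

    from nose.plugins.logcapture import FilterSet

    fs = FilterSet(['sqlalchemy', 'myapp', '-myapp.noisy'])
    print(fs.allow('sqlalchemy.engine'))   # True  -- child of an included logger
    print(fs.allow('myapp.foo.bar'))       # True
    print(fs.allow('myapp.noisy.child'))   # False -- excluded via the '-' prefix
    print(fs.allow('foobar'))              # False -- matches no inclusive entry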
diff --git a/lib/spack/external/nose/plugins/manager.py b/lib/spack/external/nose/plugins/manager.py
deleted file mode 100644
index 4d2ed22b6f..0000000000
--- a/lib/spack/external/nose/plugins/manager.py
+++ /dev/null
@@ -1,460 +0,0 @@
-"""
-Plugin Manager
---------------
-
-A plugin manager class is used to load plugins, manage the list of
-loaded plugins, and proxy calls to those plugins.
-
-The plugin managers provided with nose are:
-
-:class:`PluginManager`
- This manager doesn't implement loadPlugins, so it can only work
- with a static list of plugins.
-
-:class:`BuiltinPluginManager`
- This manager loads plugins referenced in ``nose.plugins.builtin``.
-
-:class:`EntryPointPluginManager`
- This manager uses setuptools entrypoints to load plugins.
-
-:class:`ExtraPluginsPluginManager`
- This manager loads extra plugins specified with the keyword
- `addplugins`.
-
-:class:`DefaultPluginManager`
- This is the manager class that will be used by default. If
- setuptools is installed, it is a subclass of
- :class:`EntryPointPluginManager` and :class:`BuiltinPluginManager`;
- otherwise, an alias to :class:`BuiltinPluginManager`.
-
-:class:`RestrictedPluginManager`
- This manager is for use in test runs where some plugin calls are
- not available, such as runs started with ``python setup.py test``,
- where the test runner is the default unittest :class:`TextTestRunner`. It
- is a subclass of :class:`DefaultPluginManager`.
-
-Writing a plugin manager
-========================
-
-If you want to load plugins via some other means, you can write a
-plugin manager and pass an instance of your plugin manager class when
-instantiating the :class:`nose.config.Config` instance that you pass to
-:class:`TestProgram` (or :func:`main` or :func:`run`).
-
-To implement your plugin loading scheme, implement ``loadPlugins()``,
-and in that method, call ``addPlugin()`` with an instance of each plugin
-you wish to make available. Make sure to call
-``super(YourManager, self).loadPlugins()`` as well if you have subclassed a
-manager other than ``PluginManager``.
-
-"""
-import inspect
-import logging
-import os
-import sys
-from itertools import chain as iterchain
-from warnings import warn
-import nose.config
-from nose.failure import Failure
-from nose.plugins.base import IPluginInterface
-from nose.pyversion import sort_list
-
-try:
- import cPickle as pickle
-except:
- import pickle
-try:
- from cStringIO import StringIO
-except:
- from StringIO import StringIO
-
-
-__all__ = ['DefaultPluginManager', 'PluginManager', 'EntryPointPluginManager',
- 'BuiltinPluginManager', 'RestrictedPluginManager']
-
-log = logging.getLogger(__name__)
-
-
-class PluginProxy(object):
- """Proxy for plugin calls. Essentially a closure bound to the
- given call and plugin list.
-
- The plugin proxy also must be bound to a particular plugin
- interface specification, so that it knows what calls are available
- and any special handling that is required for each call.
- """
- interface = IPluginInterface
- def __init__(self, call, plugins):
- try:
- self.method = getattr(self.interface, call)
- except AttributeError:
- raise AttributeError("%s is not a valid %s method"
- % (call, self.interface.__name__))
- self.call = self.makeCall(call)
- self.plugins = []
- for p in plugins:
- self.addPlugin(p, call)
-
- def __call__(self, *arg, **kw):
- return self.call(*arg, **kw)
-
- def addPlugin(self, plugin, call):
- """Add plugin to my list of plugins to call, if it has the attribute
- I'm bound to.
- """
- meth = getattr(plugin, call, None)
- if meth is not None:
- if call == 'loadTestsFromModule' and \
- len(inspect.getargspec(meth)[0]) == 2:
- orig_meth = meth
- meth = lambda module, path, **kwargs: orig_meth(module)
- self.plugins.append((plugin, meth))
-
- def makeCall(self, call):
- if call == 'loadTestsFromNames':
- # special case -- load tests from names behaves somewhat differently
- # from other chainable calls, because plugins return a tuple, only
- # part of which can be chained to the next plugin.
- return self._loadTestsFromNames
-
- meth = self.method
- if getattr(meth, 'generative', False):
- # call all plugins and yield a flattened iterator of their results
- return lambda *arg, **kw: list(self.generate(*arg, **kw))
- elif getattr(meth, 'chainable', False):
- return self.chain
- else:
- # return a value from the first plugin that returns non-None
- return self.simple
-
- def chain(self, *arg, **kw):
- """Call plugins in a chain, where the result of each plugin call is
- sent to the next plugin as input. The final output result is returned.
- """
- result = None
- # extract the static arguments (if any) from arg so they can
- # be passed to each plugin call in the chain
- static = [a for (static, a)
- in zip(getattr(self.method, 'static_args', []), arg)
- if static]
- for p, meth in self.plugins:
- result = meth(*arg, **kw)
- arg = static[:]
- arg.append(result)
- return result
-
- def generate(self, *arg, **kw):
- """Call all plugins, yielding each item in each non-None result.
- """
- for p, meth in self.plugins:
- result = None
- try:
- result = meth(*arg, **kw)
- if result is not None:
- for r in result:
- yield r
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- exc = sys.exc_info()
- yield Failure(*exc)
- continue
-
- def simple(self, *arg, **kw):
- """Call all plugins, returning the first non-None result.
- """
- for p, meth in self.plugins:
- result = meth(*arg, **kw)
- if result is not None:
- return result
-
- def _loadTestsFromNames(self, names, module=None):
- """Chainable but not quite normal. Plugins return a tuple of
- (tests, names) after processing the names. The tests are added
- to a suite that is accumulated throughout the full call, while
- names are input for the next plugin in the chain.
- """
- suite = []
- for p, meth in self.plugins:
- result = meth(names, module=module)
- if result is not None:
- suite_part, names = result
- if suite_part:
- suite.extend(suite_part)
- return suite, names
-
-
-class NoPlugins(object):
- """Null Plugin manager that has no plugins."""
- interface = IPluginInterface
- def __init__(self):
- self._plugins = self.plugins = ()
-
- def __iter__(self):
- return ()
-
- def _doNothing(self, *args, **kwds):
- pass
-
- def _emptyIterator(self, *args, **kwds):
- return ()
-
- def __getattr__(self, call):
- method = getattr(self.interface, call)
- if getattr(method, "generative", False):
- return self._emptyIterator
- else:
- return self._doNothing
-
- def addPlugin(self, plug):
- raise NotImplementedError()
-
- def addPlugins(self, plugins):
- raise NotImplementedError()
-
- def configure(self, options, config):
- pass
-
- def loadPlugins(self):
- pass
-
- def sort(self):
- pass
-
-
-class PluginManager(object):
- """Base class for plugin managers. PluginManager is intended to be
- used only with a static list of plugins. The loadPlugins() implementation
- only reloads plugins from _extraplugins to prevent those from being
- overridden by a subclass.
-
- The basic functionality of a plugin manager is to proxy all unknown
- attributes through a ``PluginProxy`` to a list of plugins.
-
- Note that the list of plugins *may not* be changed after the first plugin
- call.
- """
- proxyClass = PluginProxy
-
- def __init__(self, plugins=(), proxyClass=None):
- self._plugins = []
- self._extraplugins = ()
- self._proxies = {}
- if plugins:
- self.addPlugins(plugins)
- if proxyClass is not None:
- self.proxyClass = proxyClass
-
- def __getattr__(self, call):
- try:
- return self._proxies[call]
- except KeyError:
- proxy = self.proxyClass(call, self._plugins)
- self._proxies[call] = proxy
- return proxy
-
- def __iter__(self):
- return iter(self.plugins)
-
- def addPlugin(self, plug):
- # allow, for instance, plugins loaded via entry points to
- # supplant builtin plugins.
- new_name = getattr(plug, 'name', object())
- self._plugins[:] = [p for p in self._plugins
- if getattr(p, 'name', None) != new_name]
- self._plugins.append(plug)
-
- def addPlugins(self, plugins=(), extraplugins=()):
- """extraplugins are maintained in a separate list and
- re-added by loadPlugins() to prevent their being overwritten
- by plugins added by a subclass of PluginManager
- """
- self._extraplugins = extraplugins
- for plug in iterchain(plugins, extraplugins):
- self.addPlugin(plug)
-
- def configure(self, options, config):
- """Configure the set of plugins with the given options
- and config instance. After configuration, disabled plugins
- are removed from the plugins list.
- """
- log.debug("Configuring plugins")
- self.config = config
- cfg = PluginProxy('configure', self._plugins)
- cfg(options, config)
- enabled = [plug for plug in self._plugins if plug.enabled]
- self.plugins = enabled
- self.sort()
- log.debug("Plugins enabled: %s", enabled)
-
- def loadPlugins(self):
- for plug in self._extraplugins:
- self.addPlugin(plug)
-
- def sort(self):
- return sort_list(self._plugins, lambda x: getattr(x, 'score', 1), reverse=True)
-
- def _get_plugins(self):
- return self._plugins
-
- def _set_plugins(self, plugins):
- self._plugins = []
- self.addPlugins(plugins)
-
- plugins = property(_get_plugins, _set_plugins, None,
- """Access the list of plugins managed by
- this plugin manager""")
-
-
-class ZeroNinePlugin:
- """Proxy for 0.9 plugins, adapts 0.10 calls to 0.9 standard.
- """
- def __init__(self, plugin):
- self.plugin = plugin
-
- def options(self, parser, env=os.environ):
- self.plugin.add_options(parser, env)
-
- def addError(self, test, err):
- if not hasattr(self.plugin, 'addError'):
- return
- # switch off to addSkip, addDeprecated if those types
- from nose.exc import SkipTest, DeprecatedTest
- ec, ev, tb = err
- if issubclass(ec, SkipTest):
- if not hasattr(self.plugin, 'addSkip'):
- return
- return self.plugin.addSkip(test.test)
- elif issubclass(ec, DeprecatedTest):
- if not hasattr(self.plugin, 'addDeprecated'):
- return
- return self.plugin.addDeprecated(test.test)
- # add capt
- capt = test.capturedOutput
- return self.plugin.addError(test.test, err, capt)
-
- def loadTestsFromFile(self, filename):
- if hasattr(self.plugin, 'loadTestsFromPath'):
- return self.plugin.loadTestsFromPath(filename)
-
- def addFailure(self, test, err):
- if not hasattr(self.plugin, 'addFailure'):
- return
- # add capt and tbinfo
- capt = test.capturedOutput
- tbinfo = test.tbinfo
- return self.plugin.addFailure(test.test, err, capt, tbinfo)
-
- def addSuccess(self, test):
- if not hasattr(self.plugin, 'addSuccess'):
- return
- capt = test.capturedOutput
- self.plugin.addSuccess(test.test, capt)
-
- def startTest(self, test):
- if not hasattr(self.plugin, 'startTest'):
- return
- return self.plugin.startTest(test.test)
-
- def stopTest(self, test):
- if not hasattr(self.plugin, 'stopTest'):
- return
- return self.plugin.stopTest(test.test)
-
- def __getattr__(self, val):
- return getattr(self.plugin, val)
-
-
-class EntryPointPluginManager(PluginManager):
- """Plugin manager that loads plugins from the `nose.plugins` and
- `nose.plugins.0.10` entry points.
- """
- entry_points = (('nose.plugins.0.10', None),
- ('nose.plugins', ZeroNinePlugin))
-
- def loadPlugins(self):
- """Load plugins by iterating the `nose.plugins` entry point.
- """
- from pkg_resources import iter_entry_points
- loaded = {}
- for entry_point, adapt in self.entry_points:
- for ep in iter_entry_points(entry_point):
- if ep.name in loaded:
- continue
- loaded[ep.name] = True
- log.debug('%s load plugin %s', self.__class__.__name__, ep)
- try:
- plugcls = ep.load()
- except KeyboardInterrupt:
- raise
- except Exception, e:
- # never want a plugin load to kill the test run
- # but we can't log here because the logger is not yet
- # configured
- warn("Unable to load plugin %s: %s" % (ep, e),
- RuntimeWarning)
- continue
- if adapt:
- plug = adapt(plugcls())
- else:
- plug = plugcls()
- self.addPlugin(plug)
- super(EntryPointPluginManager, self).loadPlugins()
-
-
-class BuiltinPluginManager(PluginManager):
- """Plugin manager that loads plugins from the list in
- `nose.plugins.builtin`.
- """
- def loadPlugins(self):
- """Load plugins in nose.plugins.builtin
- """
- from nose.plugins import builtin
- for plug in builtin.plugins:
- self.addPlugin(plug())
- super(BuiltinPluginManager, self).loadPlugins()
-
-try:
- import pkg_resources
- class DefaultPluginManager(EntryPointPluginManager, BuiltinPluginManager):
- pass
-
-except ImportError:
- class DefaultPluginManager(BuiltinPluginManager):
- pass
-
-class RestrictedPluginManager(DefaultPluginManager):
- """Plugin manager that restricts the plugin list to those not
- excluded by a list of exclude methods. Any plugin that implements
- an excluded method will be removed from the manager's plugin list
- after plugins are loaded.
- """
- def __init__(self, plugins=(), exclude=(), load=True):
- DefaultPluginManager.__init__(self, plugins)
- self.load = load
- self.exclude = exclude
- self.excluded = []
- self._excludedOpts = None
-
- def excludedOption(self, name):
- if self._excludedOpts is None:
- from optparse import OptionParser
- self._excludedOpts = OptionParser(add_help_option=False)
- for plugin in self.excluded:
- plugin.options(self._excludedOpts, env={})
- return self._excludedOpts.get_option('--' + name)
-
- def loadPlugins(self):
- if self.load:
- DefaultPluginManager.loadPlugins(self)
- allow = []
- for plugin in self.plugins:
- ok = True
- for method in self.exclude:
- if hasattr(plugin, method):
- ok = False
- self.excluded.append(plugin)
- break
- if ok:
- allow.append(plugin)
- self.plugins = allow
diff --git a/lib/spack/external/nose/plugins/multiprocess.py b/lib/spack/external/nose/plugins/multiprocess.py
deleted file mode 100644
index 2cae744a11..0000000000
--- a/lib/spack/external/nose/plugins/multiprocess.py
+++ /dev/null
@@ -1,835 +0,0 @@
-"""
-Overview
-========
-
-The multiprocess plugin enables you to distribute your test run among a set of
-worker processes that run tests in parallel. This can speed up CPU-bound test
-runs (as long as the number of worker processes is around the number of
-processors or cores available), but is mainly useful for IO-bound tests that
-spend most of their time waiting for data to arrive from someplace else.
-
-.. note ::
-
- See :doc:`../doc_tests/test_multiprocess/multiprocess` for
- additional documentation and examples. Use of this plugin on python
- 2.5 or earlier requires the multiprocessing_ module, also available
- from PyPI.
-
-.. _multiprocessing : http://code.google.com/p/python-multiprocessing/
-
-How tests are distributed
-=========================
-
-The ideal case would be to dispatch each test to a worker process
-separately. This ideal is not attainable in all cases, however, because many
-test suites depend on context (class, module or package) fixtures.
-
-The plugin can't know (unless you tell it -- see below!) if a context fixture
-can be called many times concurrently (is re-entrant), or if it can be shared
-among tests running in different processes. Therefore, if a context has
-fixtures, the default behavior is to dispatch the entire suite to a worker as
-a unit.
-
-Controlling distribution
-^^^^^^^^^^^^^^^^^^^^^^^^
-
-There are two context-level variables that you can use to control this default
-behavior.
-
-If a context's fixtures are re-entrant, set ``_multiprocess_can_split_ = True``
-in the context, and the plugin will dispatch tests in suites bound to that
-context as if the context had no fixtures. This means that the fixtures will
-execute concurrently and multiple times, typically once per test.
-
-If a context's fixtures can be shared by tests running in different processes
--- such as a package-level fixture that starts an external http server or
-initializes a shared database -- then set ``_multiprocess_shared_ = True`` in
-the context. These fixtures will then execute in the primary nose process, and
-tests in those contexts will be individually dispatched to run in parallel.
-
-How results are collected and reported
-======================================
-
-As each test or suite executes in a worker process, results (failures, errors,
-and specially handled exceptions like SkipTest) are collected in that
-process. When the worker process finishes, it returns results to the main
-nose process. There, any progress output is printed (dots!), and the
-results from the test run are combined into a consolidated result
-set. When results have been received for all dispatched tests, or all
-workers have died, the result summary is output as normal.
-
-Beware!
-=======
-
-Not all test suites will benefit from, or even operate correctly using, this
-plugin. For example, CPU-bound tests will run more slowly if you don't have
-multiple processors. There are also some differences in plugin
-interactions and behaviors due to the way in which tests are dispatched and
-loaded. In general, test loading under this plugin operates as if it were
-always in directed mode instead of discovered mode. For instance, doctests
-in test modules will always be found when using this plugin with the doctest
-plugin.
-
-But the biggest issue you will face is probably concurrency. Unless you
-have kept your tests as religiously pure unit tests, with no side-effects, no
-ordering issues, and no external dependencies, chances are you will experience
-odd, intermittent and unexplainable failures and errors when using this
-plugin. This doesn't necessarily mean the plugin is broken; it may mean that
-your test suite is not safe for concurrency.
-
-New Features in 1.1.0
-=====================
-
-* functions generated by test generators are now added to the worker queue,
-  so they are distributed across the worker processes as well.
-* fixed timeout functionality: tests are now terminated with a
-  TimedOutException when they exceed their allotted execution time. The
-  worker processes themselves are not terminated.
-* added the ``--process-restartworker`` option to restart workers once they
-  are done; this helps control memory usage, since leaks can otherwise
-  accumulate and make long runs very difficult.
-* added global _instantiate_plugins to configure which plugins are started
- on the worker processes.
-
-"""
-
-import logging
-import os
-import sys
-import time
-import traceback
-import unittest
-import pickle
-import signal
-import nose.case
-from nose.core import TextTestRunner
-from nose import failure
-from nose import loader
-from nose.plugins.base import Plugin
-from nose.pyversion import bytes_
-from nose.result import TextTestResult
-from nose.suite import ContextSuite
-from nose.util import test_address
-try:
- # 2.7+
- from unittest.runner import _WritelnDecorator
-except ImportError:
- from unittest import _WritelnDecorator
-from Queue import Empty
-from warnings import warn
-try:
- from cStringIO import StringIO
-except ImportError:
- import StringIO
-
-# this is a list of plugin classes that will be checked for and created inside
-# each worker process
-_instantiate_plugins = None
-
-log = logging.getLogger(__name__)
-
-Process = Queue = Pool = Event = Value = Array = None
-
-# have to inherit from KeyboardInterrupt so it will interrupt the process properly
-class TimedOutException(KeyboardInterrupt):
- def __init__(self, value = "Timed Out"):
- self.value = value
- def __str__(self):
- return repr(self.value)
-
-def _import_mp():
- global Process, Queue, Pool, Event, Value, Array
- try:
- from multiprocessing import Manager, Process
- # prevent the manager's server process (which holds Python objects
- # and allows other processes to manipulate them through proxies)
- # from being interrupted by SIGINT (KeyboardInterrupt), so that the
- # communication channel between subprocesses and the main process
- # is still usable after ctrl+C is received in the main process.
- old=signal.signal(signal.SIGINT, signal.SIG_IGN)
- m = Manager()
- #reset it back so main process will receive a KeyboardInterrupt
- #exception on ctrl+c
- signal.signal(signal.SIGINT, old)
- Queue, Pool, Event, Value, Array = (
- m.Queue, m.Pool, m.Event, m.Value, m.Array
- )
- except ImportError:
- warn("multiprocessing module is not available, multiprocess plugin "
- "cannot be used", RuntimeWarning)
-
-
-class TestLet:
- def __init__(self, case):
- try:
- self._id = case.id()
- except AttributeError:
- pass
- self._short_description = case.shortDescription()
- self._str = str(case)
-
- def id(self):
- return self._id
-
- def shortDescription(self):
- return self._short_description
-
- def __str__(self):
- return self._str
-
-class MultiProcess(Plugin):
- """
- Run tests in multiple processes. Requires the multiprocessing module.
- """
- score = 1000
- status = {}
-
- def options(self, parser, env):
- """
- Register command-line options.
- """
- parser.add_option("--processes", action="store",
- default=env.get('NOSE_PROCESSES', 0),
- dest="multiprocess_workers",
- metavar="NUM",
- help="Spread test run among this many processes. "
- "Set a number equal to the number of processors "
- "or cores in your machine for best results. "
- "Pass a negative number to have the number of "
- "processes automatically set to the number of "
- "cores. Passing 0 means to disable parallel "
- "testing. Default is 0 unless NOSE_PROCESSES is "
- "set. "
- "[NOSE_PROCESSES]")
- parser.add_option("--process-timeout", action="store",
- default=env.get('NOSE_PROCESS_TIMEOUT', 10),
- dest="multiprocess_timeout",
- metavar="SECONDS",
- help="Set timeout for return of results from each "
- "test runner process. Default is 10. "
- "[NOSE_PROCESS_TIMEOUT]")
- parser.add_option("--process-restartworker", action="store_true",
- default=env.get('NOSE_PROCESS_RESTARTWORKER', False),
- dest="multiprocess_restartworker",
- help="If set, will restart each worker process once"
- " its tests are done; this helps keep accumulated "
- "memory leaks from killing the system. "
- "[NOSE_PROCESS_RESTARTWORKER]")
-
- def configure(self, options, config):
- """
- Configure plugin.
- """
- try:
- self.status.pop('active')
- except KeyError:
- pass
- if not hasattr(options, 'multiprocess_workers'):
- self.enabled = False
- return
- # don't start inside of a worker process
- if config.worker:
- return
- self.config = config
- try:
- workers = int(options.multiprocess_workers)
- except (TypeError, ValueError):
- workers = 0
- if workers:
- _import_mp()
- if Process is None:
- self.enabled = False
- return
- # Negative number of workers will cause multiprocessing to hang.
- # Set the number of workers to the CPU count to avoid this.
- if workers < 0:
- try:
- import multiprocessing
- workers = multiprocessing.cpu_count()
- except NotImplementedError:
- self.enabled = False
- return
- self.enabled = True
- self.config.multiprocess_workers = workers
- t = float(options.multiprocess_timeout)
- self.config.multiprocess_timeout = t
- r = int(options.multiprocess_restartworker)
- self.config.multiprocess_restartworker = r
- self.status['active'] = True
-
- def prepareTestLoader(self, loader):
- """Remember loader class so MultiProcessTestRunner can instantiate
- the right loader.
- """
- self.loaderClass = loader.__class__
-
- def prepareTestRunner(self, runner):
- """Replace test runner with MultiProcessTestRunner.
- """
- # replace with our runner class
- return MultiProcessTestRunner(stream=runner.stream,
- verbosity=self.config.verbosity,
- config=self.config,
- loaderClass=self.loaderClass)
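A hedged example of driving the options registered above programmatically; the flag values are illustrative, not defaults::

    import nose

    # Spread the run across four worker processes, wait up to 30 seconds for
    # results from each dispatched test, and restart each worker when it
    # finishes its queue of work.
    nose.run(argv=['nosetests', '--processes=4', '--process-timeout=30',
                   '--process-restartworker'])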
-
-def signalhandler(sig, frame):
- raise TimedOutException()
-
-class MultiProcessTestRunner(TextTestRunner):
- waitkilltime = 5.0 # max time to wait to terminate a process that does not
- # respond to SIGILL
- def __init__(self, **kw):
- self.loaderClass = kw.pop('loaderClass', loader.defaultTestLoader)
- super(MultiProcessTestRunner, self).__init__(**kw)
-
- def collect(self, test, testQueue, tasks, to_teardown, result):
- # dispatch and collect results
- # put test addresses (not test objects) on the queue, because tests aren't picklable
- for case in self.nextBatch(test):
- log.debug("Next batch %s (%s)", case, type(case))
- if (isinstance(case, nose.case.Test) and
- isinstance(case.test, failure.Failure)):
- log.debug("Case is a Failure")
- case(result) # run here to capture the failure
- continue
- # handle shared fixtures
- if isinstance(case, ContextSuite) and case.context is failure.Failure:
- log.debug("Case is a Failure")
- case(result) # run here to capture the failure
- continue
- elif isinstance(case, ContextSuite) and self.sharedFixtures(case):
- log.debug("%s has shared fixtures", case)
- try:
- case.setUp()
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- log.debug("%s setup failed", sys.exc_info())
- result.addError(case, sys.exc_info())
- else:
- to_teardown.append(case)
- if case.factory:
- ancestors=case.factory.context.get(case, [])
- for an in ancestors[:2]:
- #log.debug('reset ancestor %s', an)
- if getattr(an, '_multiprocess_shared_', False):
- an._multiprocess_can_split_=True
- #an._multiprocess_shared_=False
- self.collect(case, testQueue, tasks, to_teardown, result)
-
- else:
- test_addr = self.addtask(testQueue,tasks,case)
- log.debug("Queued test %s (%s) to %s",
- len(tasks), test_addr, testQueue)
-
- def startProcess(self, iworker, testQueue, resultQueue, shouldStop, result):
- currentaddr = Value('c',bytes_(''))
- currentstart = Value('d',time.time())
- keyboardCaught = Event()
- p = Process(target=runner,
- args=(iworker, testQueue,
- resultQueue,
- currentaddr,
- currentstart,
- keyboardCaught,
- shouldStop,
- self.loaderClass,
- result.__class__,
- pickle.dumps(self.config)))
- p.currentaddr = currentaddr
- p.currentstart = currentstart
- p.keyboardCaught = keyboardCaught
- old = signal.signal(signal.SIGILL, signalhandler)
- p.start()
- signal.signal(signal.SIGILL, old)
- return p
-
- def run(self, test):
- """
- Execute the test (which may be a test suite). If the test is a suite,
- distribute it out among as many processes as have been configured, at
- as fine a level as is possible given the context fixtures defined in
- the suite or any sub-suites.
-
- """
- log.debug("%s.run(%s) (%s)", self, test, os.getpid())
- wrapper = self.config.plugins.prepareTest(test)
- if wrapper is not None:
- test = wrapper
-
- # plugins can decorate or capture the output stream
- wrapped = self.config.plugins.setOutputStream(self.stream)
- if wrapped is not None:
- self.stream = wrapped
-
- testQueue = Queue()
- resultQueue = Queue()
- tasks = []
- completed = []
- workers = []
- to_teardown = []
- shouldStop = Event()
-
- result = self._makeResult()
- start = time.time()
-
- self.collect(test, testQueue, tasks, to_teardown, result)
-
- log.debug("Starting %s workers", self.config.multiprocess_workers)
- for i in range(self.config.multiprocess_workers):
- p = self.startProcess(i, testQueue, resultQueue, shouldStop, result)
- workers.append(p)
- log.debug("Started worker process %s", i+1)
-
- total_tasks = len(tasks)
- # need to keep track of the next time to check for timeouts in case
- # more than one process times out at the same time.
- nexttimeout=self.config.multiprocess_timeout
- thrownError = None
-
- try:
- while tasks:
- log.debug("Waiting for results (%s/%s tasks), next timeout=%.3fs",
- len(completed), total_tasks,nexttimeout)
- try:
- iworker, addr, newtask_addrs, batch_result = resultQueue.get(
- timeout=nexttimeout)
- log.debug('Results received for worker %d, %s, new tasks: %d',
- iworker,addr,len(newtask_addrs))
- try:
- try:
- tasks.remove(addr)
- except ValueError:
- log.warn('worker %s failed to remove from tasks: %s',
- iworker,addr)
- total_tasks += len(newtask_addrs)
- tasks.extend(newtask_addrs)
- except KeyError:
- log.debug("Got result for unknown task? %s", addr)
- log.debug("current: %s",str(list(tasks)[0]))
- else:
- completed.append([addr,batch_result])
- self.consolidate(result, batch_result)
- if (self.config.stopOnError
- and not result.wasSuccessful()):
- # set the stop condition
- shouldStop.set()
- break
- if self.config.multiprocess_restartworker:
- log.debug('joining worker %s',iworker)
- # wait for the worker; it is not critical if the worker
- # cannot be joined. In fact, workers that add to
- # testQueue will not terminate until all their
- # items are read.
- workers[iworker].join(timeout=1)
- if not shouldStop.is_set() and not testQueue.empty():
- log.debug('starting new process on worker %s',iworker)
- workers[iworker] = self.startProcess(iworker, testQueue, resultQueue, shouldStop, result)
- except Empty:
- log.debug("Timed out with %s tasks pending "
- "(empty testQueue=%r): %s",
- len(tasks),testQueue.empty(),str(tasks))
- any_alive = False
- for iworker, w in enumerate(workers):
- if w.is_alive():
- worker_addr = bytes_(w.currentaddr.value,'ascii')
- timeprocessing = time.time() - w.currentstart.value
- if ( len(worker_addr) == 0
- and timeprocessing > self.config.multiprocess_timeout-0.1):
- log.debug('worker %d has finished its work item, '
- 'but is not exiting? do we wait for it?',
- iworker)
- else:
- any_alive = True
- if (len(worker_addr) > 0
- and timeprocessing > self.config.multiprocess_timeout-0.1):
- log.debug('timed out worker %s: %s',
- iworker,worker_addr)
- w.currentaddr.value = bytes_('')
- # If the process is in C++ code, sending a SIGILL
- # might not raise a Python KeyboardInterrupt exception;
- # therefore, send multiple signals until an
- # exception is caught. If this takes too long, then
- # terminate the process
- w.keyboardCaught.clear()
- startkilltime = time.time()
- while not w.keyboardCaught.is_set() and w.is_alive():
- if time.time()-startkilltime > self.waitkilltime:
- # have to terminate...
- log.error("terminating worker %s",iworker)
- w.terminate()
- # there is a small probability that the
- # terminated process might send a result,
- # which has to be specially handled or
- # else processes might get orphaned.
- workers[iworker] = w = self.startProcess(iworker, testQueue, resultQueue, shouldStop, result)
- break
- os.kill(w.pid, signal.SIGILL)
- time.sleep(0.1)
- if not any_alive and testQueue.empty():
- log.debug("All workers dead")
- break
- nexttimeout=self.config.multiprocess_timeout
- for w in workers:
- if w.is_alive() and len(w.currentaddr.value) > 0:
- timeprocessing = time.time()-w.currentstart.value
- if timeprocessing <= self.config.multiprocess_timeout:
- nexttimeout = min(nexttimeout,
- self.config.multiprocess_timeout-timeprocessing)
- log.debug("Completed %s tasks (%s remain)", len(completed), len(tasks))
-
- except (KeyboardInterrupt, SystemExit), e:
- log.info('parent received ctrl-c when waiting for test results')
- thrownError = e
- #resultQueue.get(False)
-
- result.addError(test, sys.exc_info())
-
- try:
- for case in to_teardown:
- log.debug("Tearing down shared fixtures for %s", case)
- try:
- case.tearDown()
- except (KeyboardInterrupt, SystemExit):
- raise
- except:
- result.addError(case, sys.exc_info())
-
- stop = time.time()
-
- # write results first, since shutting down the processes can freeze
- result.printErrors()
- result.printSummary(start, stop)
- self.config.plugins.finalize(result)
-
- if thrownError is None:
- log.debug("Tell all workers to stop")
- for w in workers:
- if w.is_alive():
- testQueue.put('STOP', block=False)
-
- # wait for the workers to end
- for iworker,worker in enumerate(workers):
- if worker.is_alive():
- log.debug('joining worker %s',iworker)
- worker.join()
- if worker.is_alive():
- log.debug('failed to join worker %s',iworker)
- except (KeyboardInterrupt, SystemExit):
- log.info('parent received ctrl-c when shutting down: stop all processes')
- for worker in workers:
- if worker.is_alive():
- worker.terminate()
-
- if thrownError: raise thrownError
- else: raise
-
- return result
-
- def addtask(testQueue,tasks,case):
- arg = None
- if isinstance(case,nose.case.Test) and hasattr(case.test,'arg'):
- # this removes the top-level descriptor and allows the real
- # function name to be returned
- case.test.descriptor = None
- arg = case.test.arg
- test_addr = MultiProcessTestRunner.address(case)
- testQueue.put((test_addr,arg), block=False)
- if arg is not None:
- test_addr += str(arg)
- if tasks is not None:
- tasks.append(test_addr)
- return test_addr
- addtask = staticmethod(addtask)
-
- def address(case):
- if hasattr(case, 'address'):
- file, mod, call = case.address()
- elif hasattr(case, 'context'):
- file, mod, call = test_address(case.context)
- else:
- raise Exception("Unable to convert %s to address" % case)
- parts = []
- if file is None:
- if mod is None:
- raise Exception("Unaddressable case %s" % case)
- else:
- parts.append(mod)
- else:
- # strip __init__.py(c) from end of file part
- # if present, having it there confuses loader
- dirname, basename = os.path.split(file)
- if basename.startswith('__init__'):
- file = dirname
- parts.append(file)
- if call is not None:
- parts.append(call)
- return ':'.join(map(str, parts))
- address = staticmethod(address)
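A small, self-contained illustration of the 'file-or-module:callable' strings that address() produces (the path below is made up); these are the strings that addtask() places on the test queue::

    parts = ['/home/me/proj/tests/test_mod.py', 'TestThing.test_works']
    print(':'.join(map(str, parts)))
    # -> /home/me/proj/tests/test_mod.py:TestThing.test_works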
-
- def nextBatch(self, test):
- # allows tests or suites to mark themselves as not safe
- # for multiprocess execution
- if hasattr(test, 'context'):
- if not getattr(test.context, '_multiprocess_', True):
- return
-
- if ((isinstance(test, ContextSuite)
- and test.hasFixtures(self.checkCanSplit))
- or not getattr(test, 'can_split', True)
- or not isinstance(test, unittest.TestSuite)):
- # regular test case, or a suite with context fixtures
-
- # special case: when run like nosetests path/to/module.py
- # the top-level suite has only one item, and it shares
- # the same context as that item. In that case, we want the
- # item, not the top-level suite
- if isinstance(test, ContextSuite):
- contained = list(test)
- if (len(contained) == 1
- and getattr(contained[0],
- 'context', None) == test.context):
- test = contained[0]
- yield test
- else:
- # Suite is without fixtures at this level; but it may have
- # fixtures at any deeper level, so we need to examine it all
- # the way down to the case level
- for case in test:
- for batch in self.nextBatch(case):
- yield batch
-
- def checkCanSplit(context, fixt):
- """
- Callback that we use to check whether the fixtures found in a
- context or ancestor are ones we care about.
-
- Contexts can tell us that their fixtures are reentrant by setting
- _multiprocess_can_split_. So if we see that, we return False to
- disregard those fixtures.
- """
- if not fixt:
- return False
- if getattr(context, '_multiprocess_can_split_', False):
- return False
- return True
- checkCanSplit = staticmethod(checkCanSplit)
-
- def sharedFixtures(self, case):
- context = getattr(case, 'context', None)
- if not context:
- return False
- return getattr(context, '_multiprocess_shared_', False)
-
- def consolidate(self, result, batch_result):
- log.debug("batch result is %s" , batch_result)
- try:
- output, testsRun, failures, errors, errorClasses = batch_result
- except ValueError:
- log.debug("result in unexpected format %s", batch_result)
- failure.Failure(*sys.exc_info())(result)
- return
- self.stream.write(output)
- result.testsRun += testsRun
- result.failures.extend(failures)
- result.errors.extend(errors)
- for key, (storage, label, isfail) in errorClasses.items():
- if key not in result.errorClasses:
- # Ordinarily storage is a result attribute,
- # but it's only processed through the errorClasses
- # dict, so it's ok to fake it here
- result.errorClasses[key] = ([], label, isfail)
- mystorage, _junk, _junk = result.errorClasses[key]
- mystorage.extend(storage)
- log.debug("Ran %s tests (total: %s)", testsRun, result.testsRun)
-
-
-def runner(ix, testQueue, resultQueue, currentaddr, currentstart,
- keyboardCaught, shouldStop, loaderClass, resultClass, config):
- try:
- try:
- return __runner(ix, testQueue, resultQueue, currentaddr, currentstart,
- keyboardCaught, shouldStop, loaderClass, resultClass, config)
- except KeyboardInterrupt:
- log.debug('Worker %s keyboard interrupt, stopping',ix)
- except Empty:
- log.debug("Worker %s timed out waiting for tasks", ix)
-
-def __runner(ix, testQueue, resultQueue, currentaddr, currentstart,
- keyboardCaught, shouldStop, loaderClass, resultClass, config):
-
- config = pickle.loads(config)
- dummy_parser = config.parserClass()
- if _instantiate_plugins is not None:
- for pluginclass in _instantiate_plugins:
- plugin = pluginclass()
- plugin.addOptions(dummy_parser,{})
- config.plugins.addPlugin(plugin)
- config.plugins.configure(config.options,config)
- config.plugins.begin()
- log.debug("Worker %s executing, pid=%d", ix,os.getpid())
- loader = loaderClass(config=config)
- loader.suiteClass.suiteClass = NoSharedFixtureContextSuite
-
- def get():
- return testQueue.get(timeout=config.multiprocess_timeout)
-
- def makeResult():
- stream = _WritelnDecorator(StringIO())
- result = resultClass(stream, descriptions=1,
- verbosity=config.verbosity,
- config=config)
- plug_result = config.plugins.prepareTestResult(result)
- if plug_result:
- return plug_result
- return result
-
- def batch(result):
- failures = [(TestLet(c), err) for c, err in result.failures]
- errors = [(TestLet(c), err) for c, err in result.errors]
- errorClasses = {}
- for key, (storage, label, isfail) in result.errorClasses.items():
- errorClasses[key] = ([(TestLet(c), err) for c, err in storage],
- label, isfail)
- return (
- result.stream.getvalue(),
- result.testsRun,
- failures,
- errors,
- errorClasses)
- for test_addr, arg in iter(get, 'STOP'):
- if shouldStop.is_set():
- log.exception('Worker %d STOPPED',ix)
- break
- result = makeResult()
- test = loader.loadTestsFromNames([test_addr])
- test.testQueue = testQueue
- test.tasks = []
- test.arg = arg
- log.debug("Worker %s Test is %s (%s)", ix, test_addr, test)
- try:
- if arg is not None:
- test_addr = test_addr + str(arg)
- currentaddr.value = bytes_(test_addr)
- currentstart.value = time.time()
- test(result)
- currentaddr.value = bytes_('')
- resultQueue.put((ix, test_addr, test.tasks, batch(result)))
- except KeyboardInterrupt, e: #TimedOutException:
- timeout = isinstance(e, TimedOutException)
- if timeout:
- keyboardCaught.set()
- if len(currentaddr.value):
- if timeout:
- msg = 'Worker %s timed out, failing current test %s'
- else:
- msg = 'Worker %s keyboard interrupt, failing current test %s'
- log.exception(msg,ix,test_addr)
- currentaddr.value = bytes_('')
- failure.Failure(*sys.exc_info())(result)
- resultQueue.put((ix, test_addr, test.tasks, batch(result)))
- else:
- if timeout:
- msg = 'Worker %s test %s timed out'
- else:
- msg = 'Worker %s test %s keyboard interrupt'
- log.debug(msg,ix,test_addr)
- resultQueue.put((ix, test_addr, test.tasks, batch(result)))
- if not timeout:
- raise
- except SystemExit:
- currentaddr.value = bytes_('')
- log.exception('Worker %s system exit',ix)
- raise
- except:
- currentaddr.value = bytes_('')
- log.exception("Worker %s error running test or returning "
- "results",ix)
- failure.Failure(*sys.exc_info())(result)
- resultQueue.put((ix, test_addr, test.tasks, batch(result)))
- if config.multiprocess_restartworker:
- break
- log.debug("Worker %s ending", ix)
-
-
-class NoSharedFixtureContextSuite(ContextSuite):
- """
- Context suite that never fires shared fixtures.
-
- When a context sets _multiprocess_shared_, fixtures in that context
- are executed by the main process. Using this suite class prevents them
- from executing in the runner process as well.
-
- """
- testQueue = None
- tasks = None
- arg = None
- def setupContext(self, context):
- if getattr(context, '_multiprocess_shared_', False):
- return
- super(NoSharedFixtureContextSuite, self).setupContext(context)
-
- def teardownContext(self, context):
- if getattr(context, '_multiprocess_shared_', False):
- return
- super(NoSharedFixtureContextSuite, self).teardownContext(context)
- def run(self, result):
- """Run tests in suite inside of suite fixtures.
- """
- # proxy the result for myself
- log.debug("suite %s (%s) run called, tests: %s",
- id(self), self, self._tests)
- if self.resultProxy:
- result, orig = self.resultProxy(result, self), result
- else:
- result, orig = result, result
- try:
- #log.debug('setUp for %s', id(self));
- self.setUp()
- except KeyboardInterrupt:
- raise
- except:
- self.error_context = 'setup'
- result.addError(self, self._exc_info())
- return
- try:
- for test in self._tests:
- if (isinstance(test,nose.case.Test)
- and self.arg is not None):
- test.test.arg = self.arg
- else:
- test.arg = self.arg
- test.testQueue = self.testQueue
- test.tasks = self.tasks
- if result.shouldStop:
- log.debug("stopping")
- break
- # each nose.case.Test will create its own result proxy
- # so the cases need the original result, to avoid proxy
- # chains
- #log.debug('running test %s in suite %s', test, self);
- try:
- test(orig)
- except KeyboardInterrupt, e:
- timeout = isinstance(e, TimedOutException)
- if timeout:
- msg = 'Timeout when running test %s in suite %s'
- else:
- msg = 'KeyboardInterrupt when running test %s in suite %s'
- log.debug(msg, test, self)
- err = (TimedOutException,TimedOutException(str(test)),
- sys.exc_info()[2])
- test.config.plugins.addError(test,err)
- orig.addError(test,err)
- if not timeout:
- raise
- finally:
- self.has_run = True
- try:
- #log.debug('tearDown for %s', id(self));
- self.tearDown()
- except KeyboardInterrupt:
- raise
- except:
- self.error_context = 'teardown'
- result.addError(self, self._exc_info())
diff --git a/lib/spack/external/nose/plugins/plugintest.py b/lib/spack/external/nose/plugins/plugintest.py
deleted file mode 100644
index 76d0d2c48c..0000000000
--- a/lib/spack/external/nose/plugins/plugintest.py
+++ /dev/null
@@ -1,416 +0,0 @@
-"""
-Testing Plugins
-===============
-
-The plugin interface is well-tested enough to safely unit test your
-use of its hooks with some level of confidence. However, there is also
-a mixin for unittest.TestCase called PluginTester that's designed to
-test plugins in their native runtime environment.
-
-Here's a simple example with a do-nothing plugin and a composed suite.
-
- >>> import unittest
- >>> from nose.plugins import Plugin, PluginTester
- >>> class FooPlugin(Plugin):
- ... pass
- >>> class TestPluginFoo(PluginTester, unittest.TestCase):
- ... activate = '--with-foo'
- ... plugins = [FooPlugin()]
- ... def test_foo(self):
- ... for line in self.output:
- ... # i.e. check for patterns
- ... pass
- ...
- ... # or check for a line containing ...
- ... assert "ValueError" in self.output
- ... def makeSuite(self):
- ... class TC(unittest.TestCase):
- ... def runTest(self):
- ... raise ValueError("I hate foo")
- ... return [TC('runTest')]
- ...
- >>> res = unittest.TestResult()
- >>> case = TestPluginFoo('test_foo')
- >>> _ = case(res)
- >>> res.errors
- []
- >>> res.failures
- []
- >>> res.wasSuccessful()
- True
- >>> res.testsRun
- 1
-
-And here is a more complex example of testing a plugin that has extra
-arguments and reads environment variables.
-
- >>> import unittest, os
- >>> from nose.plugins import Plugin, PluginTester
- >>> class FancyOutputter(Plugin):
- ... name = "fancy"
- ... def configure(self, options, conf):
- ... Plugin.configure(self, options, conf)
- ... if not self.enabled:
- ... return
- ... self.fanciness = 1
- ... if options.more_fancy:
- ... self.fanciness = 2
- ... if 'EVEN_FANCIER' in self.env:
- ... self.fanciness = 3
- ...
- ... def options(self, parser, env=os.environ):
- ... self.env = env
- ... parser.add_option('--more-fancy', action='store_true')
- ... Plugin.options(self, parser, env=env)
- ...
- ... def report(self, stream):
- ... stream.write("FANCY " * self.fanciness)
- ...
- >>> class TestFancyOutputter(PluginTester, unittest.TestCase):
- ... activate = '--with-fancy' # enables the plugin
- ... plugins = [FancyOutputter()]
- ... args = ['--more-fancy']
- ... env = {'EVEN_FANCIER': '1'}
- ...
- ... def test_fancy_output(self):
- ... assert "FANCY FANCY FANCY" in self.output, (
- ... "got: %s" % self.output)
- ... def makeSuite(self):
- ... class TC(unittest.TestCase):
- ... def runTest(self):
- ... raise ValueError("I hate fancy stuff")
- ... return [TC('runTest')]
- ...
- >>> res = unittest.TestResult()
- >>> case = TestFancyOutputter('test_fancy_output')
- >>> _ = case(res)
- >>> res.errors
- []
- >>> res.failures
- []
- >>> res.wasSuccessful()
- True
- >>> res.testsRun
- 1
-
-"""
-
-import re
-import sys
-from warnings import warn
-
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO
-
-__all__ = ['PluginTester', 'run']
-
-from os import getpid
-class MultiProcessFile(object):
- """
- helper for testing multiprocessing
-
- multiprocessing poses a problem for doctests, since the strategy
- of replacing sys.stdout/stderr with file-like objects then
- inspecting the results won't work: the child processes will
- write to the objects, but the data will not be reflected
- in the parent doctest-ing process.
-
- The solution is to create file-like objects which will interact with
- multiprocessing in a more desirable way.
-
- All processes can write to this object, but only the creator can read.
- This allows the testing system to see a unified picture of I/O.
- """
- def __init__(self):
- # per advice at:
- # http://docs.python.org/library/multiprocessing.html#all-platforms
- self.__master = getpid()
- self.__queue = Manager().Queue()
- self.__buffer = StringIO()
- self.softspace = 0
-
- def buffer(self):
- if getpid() != self.__master:
- return
-
- from Queue import Empty
- from collections import defaultdict
- cache = defaultdict(str)
- while True:
- try:
- pid, data = self.__queue.get_nowait()
- except Empty:
- break
- if pid == ():
- #show parent output after children
- #this is what users see, usually
- pid = ( 1e100, ) # googol!
- cache[pid] += data
- for pid in sorted(cache):
- #self.__buffer.write( '%s wrote: %r\n' % (pid, cache[pid]) ) #DEBUG
- self.__buffer.write( cache[pid] )
- def write(self, data):
- # note that these pids are in the form of current_process()._identity
- # rather than OS pids
- from multiprocessing import current_process
- pid = current_process()._identity
- self.__queue.put((pid, data))
- def __iter__(self):
- "getattr doesn't work for iter()"
- self.buffer()
- return self.__buffer
- def seek(self, offset, whence=0):
- self.buffer()
- return self.__buffer.seek(offset, whence)
- def getvalue(self):
- self.buffer()
- return self.__buffer.getvalue()
- def __getattr__(self, attr):
- return getattr(self.__buffer, attr)
-
-try:
- from multiprocessing import Manager
- Buffer = MultiProcessFile
-except ImportError:
- Buffer = StringIO
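A minimal sketch of the Buffer defined above, assuming nose is importable; only the parent writes here, but the point is that child processes could write too, and only the creating process reads the merged output::

    from nose.plugins.plugintest import Buffer

    buf = Buffer()  # MultiProcessFile when multiprocessing is available
    buf.write("hello from the parent\n")
    print(buf.getvalue())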
-
-class PluginTester(object):
- """A mixin for testing nose plugins in their runtime environment.
-
- Subclass this and mix in unittest.TestCase to run integration/functional
- tests on your plugin. When setUp() is called, the stub test suite is
- executed with your plugin so that during an actual test you can inspect the
- artifacts of how your plugin interacted with the stub test suite.
-
- - activate
-
- - the argument to send nosetests to activate the plugin
-
- - suitepath
-
- - if set, this is the path of the suite to test. Otherwise, you
- will need to use the hook, makeSuite()
-
- - plugins
-
- - the list of plugins to make available during the run. Note
- that this does not mean these plugins will be *enabled* during
- the run -- only the plugins enabled by the activate argument
- or other settings in argv or env will be enabled.
-
- - args
-
- - a list of arguments to add to the nosetests command, in addition to
- the activate argument
-
- - env
-
- - optional dict of environment variables to send nosetests
-
- """
- activate = None
- suitepath = None
- args = None
- env = {}
- argv = None
- plugins = []
- ignoreFiles = None
-
- def makeSuite(self):
- """returns a suite object of tests to run (unittest.TestSuite())
-
- If self.suitepath is None, this must be implemented. The returned suite
- object will be executed with all plugins activated. It may return
- None.
-
- Here is an example of a basic suite object you can return ::
-
- >>> import unittest
- >>> class SomeTest(unittest.TestCase):
- ... def runTest(self):
- ... raise ValueError("Now do something, plugin!")
- ...
- >>> unittest.TestSuite([SomeTest()]) # doctest: +ELLIPSIS
- <unittest...TestSuite tests=[<...SomeTest testMethod=runTest>]>
-
- """
- raise NotImplementedError
-
- def _execPlugin(self):
- """execute the plugin on the internal test suite.
- """
- from nose.config import Config
- from nose.core import TestProgram
- from nose.plugins.manager import PluginManager
-
- suite = None
- stream = Buffer()
- conf = Config(env=self.env,
- stream=stream,
- plugins=PluginManager(plugins=self.plugins))
- if self.ignoreFiles is not None:
- conf.ignoreFiles = self.ignoreFiles
- if not self.suitepath:
- suite = self.makeSuite()
-
- self.nose = TestProgram(argv=self.argv, config=conf, suite=suite,
- exit=False)
- self.output = AccessDecorator(stream)
-
- def setUp(self):
- """runs nosetests with the specified test suite, all plugins
- activated.
- """
- self.argv = ['nosetests', self.activate]
- if self.args:
- self.argv.extend(self.args)
- if self.suitepath:
- self.argv.append(self.suitepath)
-
- self._execPlugin()
-
-
-class AccessDecorator(object):
- stream = None
- _buf = None
- def __init__(self, stream):
- self.stream = stream
- stream.seek(0)
- self._buf = stream.read()
- stream.seek(0)
- def __contains__(self, val):
- return val in self._buf
- def __iter__(self):
- return iter(self.stream)
- def __str__(self):
- return self._buf
-
-
-def blankline_separated_blocks(text):
- "a bunch of === characters is also considered a blank line"
- block = []
- for line in text.splitlines(True):
- block.append(line)
- line = line.strip()
- if not line or line.startswith('===') and not line.strip('='):
- yield "".join(block)
- block = []
- if block:
- yield "".join(block)
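A hedged example of the block splitting above, assuming nose is importable; a run of '=' characters acts as a separator just like a blank line::

    from nose.plugins.plugintest import blankline_separated_blocks

    text = "first\n\nsecond\n======\nthird\n"
    print(list(blankline_separated_blocks(text)))
    # -> ['first\n\n', 'second\n======\n', 'third\n']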
-
-
-def remove_stack_traces(out):
- # this regexp taken from Python 2.5's doctest
- traceback_re = re.compile(r"""
- # Grab the traceback header. Different versions of Python have
- # said different things on the first traceback line.
- ^(?P<hdr> Traceback\ \(
- (?: most\ recent\ call\ last
- | innermost\ last
- ) \) :
- )
- \s* $ # toss trailing whitespace on the header.
- (?P<stack> .*?) # don't blink: absorb stuff until...
- ^(?=\w) # a line *starts* with alphanum.
- .*?(?P<exception> \w+ ) # exception name
- (?P<msg> [:\n] .*) # the rest
- """, re.VERBOSE | re.MULTILINE | re.DOTALL)
- blocks = []
- for block in blankline_separated_blocks(out):
- blocks.append(traceback_re.sub(r"\g<hdr>\n...\n\g<exception>\g<msg>", block))
- return "".join(blocks)
-
-
-def simplify_warnings(out):
- warn_re = re.compile(r"""
- # Cut the file and line no, up to the warning name
- ^.*:\d+:\s
- (?P<category>\w+): \s+ # warning category
- (?P<detail>.+) $ \n? # warning message
- ^ .* $ # stack frame
- """, re.VERBOSE | re.MULTILINE)
- return warn_re.sub(r"\g<category>: \g<detail>", out)
-
-
-def remove_timings(out):
- return re.sub(
- r"Ran (\d+ tests?) in [0-9.]+s", r"Ran \1 in ...s", out)
-
-
-def munge_nose_output_for_doctest(out):
- """Modify nose output to make it easy to use in doctests."""
- out = remove_stack_traces(out)
- out = simplify_warnings(out)
- out = remove_timings(out)
- return out.strip()
-
-
-def run(*arg, **kw):
- """
- Specialized version of nose.run for use inside of doctests that
- test test runs.
-
- This version of run() prints the result output to stdout. Before
- printing, the output is processed by replacing the timing
- information with an ellipsis (...), removing traceback stacks, and
- removing trailing whitespace.
-
- Use this version of run wherever you are writing a doctest that
- tests nose (or unittest) test result output.
-
- Note: do not use doctest: +ELLIPSIS when testing nose output,
- since ellipses ("test_foo ... ok") in your expected test runner
- output may match multiple lines of output, causing spurious test
- passes!
- """
- from nose import run
- from nose.config import Config
- from nose.plugins.manager import PluginManager
-
- buffer = Buffer()
- if 'config' not in kw:
- plugins = kw.pop('plugins', [])
- if isinstance(plugins, list):
- plugins = PluginManager(plugins=plugins)
- env = kw.pop('env', {})
- kw['config'] = Config(env=env, plugins=plugins)
- if 'argv' not in kw:
- kw['argv'] = ['nosetests', '-v']
- kw['config'].stream = buffer
-
- # Set up buffering so that all output goes to our buffer,
- # or warn user if deprecated behavior is active. If this is not
- # done, prints and warnings will either be out of place or
- # disappear.
- stderr = sys.stderr
- stdout = sys.stdout
- if kw.pop('buffer_all', False):
- sys.stdout = sys.stderr = buffer
- restore = True
- else:
- restore = False
- warn("The behavior of nose.plugins.plugintest.run() will change in "
- "the next release of nose. The current behavior does not "
- "correctly account for output to stdout and stderr. To enable "
- "correct behavior, use run_buffered() instead, or pass "
- "the keyword argument buffer_all=True to run().",
- DeprecationWarning, stacklevel=2)
- try:
- run(*arg, **kw)
- finally:
- if restore:
- sys.stderr = stderr
- sys.stdout = stdout
- out = buffer.getvalue()
- print munge_nose_output_for_doctest(out)
-
-
-def run_buffered(*arg, **kw):
- kw['buffer_all'] = True
- run(*arg, **kw)
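A minimal sketch of run_buffered(), assuming nose is installed; the test case is made up. Output is captured in the Buffer and then munged (timings become "...s", traceback bodies are elided) before being printed, as described in run() above::

    import unittest
    from nose.plugins.plugintest import run_buffered

    class TC(unittest.TestCase):
        def runTest(self):
            self.assertTrue(True)

    # prints doctest-friendly runner output for the one-test suite
    run_buffered(suite=unittest.TestSuite([TC()]))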
-
-if __name__ == '__main__':
- import doctest
- doctest.testmod()
diff --git a/lib/spack/external/nose/plugins/prof.py b/lib/spack/external/nose/plugins/prof.py
deleted file mode 100644
index 4d304a934b..0000000000
--- a/lib/spack/external/nose/plugins/prof.py
+++ /dev/null
@@ -1,154 +0,0 @@
-"""This plugin will run tests using the hotshot profiler, which is part
-of the standard library. To turn it on, use the ``--with-profile`` option
-or set the NOSE_WITH_PROFILE environment variable. Profiler output can be
-controlled with the ``--profile-sort`` and ``--profile-restrict`` options,
-and the profiler output file may be changed with ``--profile-stats-file``.
-
-See the `hotshot documentation`_ in the standard library documentation for
-more details on the various output options.
-
-.. _hotshot documentation: http://docs.python.org/library/hotshot.html
-"""
-
-try:
- import hotshot
- from hotshot import stats
-except ImportError:
- hotshot, stats = None, None
-import logging
-import os
-import sys
-import tempfile
-from nose.plugins.base import Plugin
-from nose.util import tolist
-
-log = logging.getLogger('nose.plugins')
-
-class Profile(Plugin):
- """
- Use this plugin to run tests using the hotshot profiler.
- """
- pfile = None
- clean_stats_file = False
- def options(self, parser, env):
- """Register commandline options.
- """
- if not self.available():
- return
- Plugin.options(self, parser, env)
- parser.add_option('--profile-sort', action='store', dest='profile_sort',
- default=env.get('NOSE_PROFILE_SORT', 'cumulative'),
- metavar="SORT",
- help="Set sort order for profiler output")
- parser.add_option('--profile-stats-file', action='store',
- dest='profile_stats_file',
- metavar="FILE",
- default=env.get('NOSE_PROFILE_STATS_FILE'),
- help='Profiler stats file; default is a new '
- 'temp file on each run')
- parser.add_option('--profile-restrict', action='append',
- dest='profile_restrict',
- metavar="RESTRICT",
- default=env.get('NOSE_PROFILE_RESTRICT'),
- help="Restrict profiler output. See help for "
- "pstats.Stats for details")
-
- def available(cls):
- return hotshot is not None
- available = classmethod(available)
-
- def begin(self):
- """Create profile stats file and load profiler.
- """
- if not self.available():
- return
- self._create_pfile()
- self.prof = hotshot.Profile(self.pfile)
-
- def configure(self, options, conf):
- """Configure plugin.
- """
- if not self.available():
- self.enabled = False
- return
- Plugin.configure(self, options, conf)
- self.conf = conf
- if options.profile_stats_file:
- self.pfile = options.profile_stats_file
- self.clean_stats_file = False
- else:
- self.pfile = None
- self.clean_stats_file = True
- self.fileno = None
- self.sort = options.profile_sort
- self.restrict = tolist(options.profile_restrict)
-
- def prepareTest(self, test):
- """Wrap entire test run in :func:`prof.runcall`.
- """
- if not self.available():
- return
- log.debug('preparing test %s' % test)
- def run_and_profile(result, prof=self.prof, test=test):
- self._create_pfile()
- prof.runcall(test, result)
- return run_and_profile
-
- def report(self, stream):
- """Output profiler report.
- """
- log.debug('printing profiler report')
- self.prof.close()
- prof_stats = stats.load(self.pfile)
- prof_stats.sort_stats(self.sort)
-
- # 2.5 has completely different stream handling from 2.4 and earlier.
- # Before 2.5, stats objects have no stream attribute; in 2.5 and later
- # a reference to sys.stdout is stored before we can tweak it.
- compat_25 = hasattr(prof_stats, 'stream')
- if compat_25:
- tmp = prof_stats.stream
- prof_stats.stream = stream
- else:
- tmp = sys.stdout
- sys.stdout = stream
- try:
- if self.restrict:
- log.debug('setting profiler restriction to %s', self.restrict)
- prof_stats.print_stats(*self.restrict)
- else:
- prof_stats.print_stats()
- finally:
- if compat_25:
- prof_stats.stream = tmp
- else:
- sys.stdout = tmp
-
- def finalize(self, result):
- """Clean up stats file, if configured to do so.
- """
- if not self.available():
- return
- try:
- self.prof.close()
- except AttributeError:
- # TODO: is this trying to catch just the case where not
- # hasattr(self.prof, "close")? If so, the function call should be
- # moved out of the try: suite.
- pass
- if self.clean_stats_file:
- if self.fileno:
- try:
- os.close(self.fileno)
- except OSError:
- pass
- try:
- os.unlink(self.pfile)
- except OSError:
- pass
- return None
-
- def _create_pfile(self):
- if not self.pfile:
- self.fileno, self.pfile = tempfile.mkstemp()
- self.clean_stats_file = True
diff --git a/lib/spack/external/nose/plugins/skip.py b/lib/spack/external/nose/plugins/skip.py
deleted file mode 100644
index 9d1ac8f604..0000000000
--- a/lib/spack/external/nose/plugins/skip.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""
-This plugin installs a SKIP error class for the SkipTest exception.
-When SkipTest is raised, the exception will be logged in the skipped
-attribute of the result, 'S' or 'SKIP' (verbose) will be output, and
-the exception will not be counted as an error or failure. This plugin
-is enabled by default but may be disabled with the ``--no-skip`` option.
-"""
-
-from nose.plugins.errorclass import ErrorClass, ErrorClassPlugin
-
-
-# on SkipTest:
-# - unittest SkipTest is first preference, but it's only available
-# for >= 2.7
-# - unittest2 SkipTest is second preference for older pythons. This
-# mirrors logic for choosing SkipTest exception in testtools
-# - if none of the above, provide custom class
-try:
- from unittest.case import SkipTest
-except ImportError:
- try:
- from unittest2.case import SkipTest
- except ImportError:
- class SkipTest(Exception):
- """Raise this exception to mark a test as skipped.
- """
- pass
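A hedged sketch of how a test opts out at runtime; with the Skip plugin enabled (the default), the raised exception is reported as 'S'/'SKIP' rather than as an error or failure::

    from nose.exc import SkipTest  # same exception class resolved above

    def test_optional_feature():
        raise SkipTest("optional dependency not installed")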
-
-
-class Skip(ErrorClassPlugin):
- """
- Plugin that installs a SKIP error class for the SkipTest
- exception. When SkipTest is raised, the exception will be logged
- in the skipped attribute of the result, 'S' or 'SKIP' (verbose)
- will be output, and the exception will not be counted as an error
- or failure.
- """
- enabled = True
- skipped = ErrorClass(SkipTest,
- label='SKIP',
- isfailure=False)
-
- def options(self, parser, env):
- """
- Add my options to command line.
- """
- env_opt = 'NOSE_WITHOUT_SKIP'
- parser.add_option('--no-skip', action='store_true',
- dest='noSkip', default=env.get(env_opt, False),
- help="Disable special handling of SkipTest "
- "exceptions.")
-
- def configure(self, options, conf):
- """
- Configure plugin. Skip plugin is enabled by default.
- """
- if not self.can_configure:
- return
- self.conf = conf
- disable = getattr(options, 'noSkip', False)
- if disable:
- self.enabled = False
-
diff --git a/lib/spack/external/nose/plugins/testid.py b/lib/spack/external/nose/plugins/testid.py
deleted file mode 100644
index ae8119bd01..0000000000
--- a/lib/spack/external/nose/plugins/testid.py
+++ /dev/null
@@ -1,311 +0,0 @@
-"""
-This plugin adds a test id (like #1) to each test name output. After
-you've run once to generate test ids, you can re-run individual
-tests by activating the plugin and passing the ids (with or
-without the # prefix) instead of test names.
-
-For example, if your normal test run looks like::
-
- % nosetests -v
- tests.test_a ... ok
- tests.test_b ... ok
- tests.test_c ... ok
-
-When adding ``--with-id`` you'll see::
-
- % nosetests -v --with-id
- #1 tests.test_a ... ok
- #2 tests.test_b ... ok
- #3 tests.test_c ... ok
-
-Then you can re-run individual tests by supplying just an id number::
-
- % nosetests -v --with-id 2
- #2 tests.test_b ... ok
-
-You can also pass multiple id numbers::
-
- % nosetests -v --with-id 2 3
- #2 tests.test_b ... ok
- #3 tests.test_c ... ok
-
-Since most shells consider '#' a special character, you can leave it out when
-specifying a test id.
-
-Note that when run without the -v switch, no special output is displayed, but
-the ids file is still written.
-
-Looping over failed tests
--------------------------
-
-This plugin also adds a mode that will direct the test runner to record
-failed tests. Subsequent test runs will then run only the tests that failed
-last time. Activate this mode with the ``--failed`` switch::
-
- % nosetests -v --failed
- #1 test.test_a ... ok
- #2 test.test_b ... ERROR
- #3 test.test_c ... FAILED
- #4 test.test_d ... ok
-
-On the second run, only tests #2 and #3 will run::
-
- % nosetests -v --failed
- #2 test.test_b ... ERROR
- #3 test.test_c ... FAILED
-
-As you correct errors and tests pass, they'll drop out of subsequent runs.
-
-First::
-
- % nosetests -v --failed
- #2 test.test_b ... ok
- #3 test.test_c ... FAILED
-
-Second::
-
- % nosetests -v --failed
- #3 test.test_c ... FAILED
-
-When all tests pass, the full set will run on the next invocation.
-
-First::
-
- % nosetests -v --failed
- #3 test.test_c ... ok
-
-Second::
-
- % nosetests -v --failed
- #1 test.test_a ... ok
- #2 test.test_b ... ok
- #3 test.test_c ... ok
- #4 test.test_d ... ok
-
-.. note ::
-
- If you expect to use ``--failed`` regularly, it's a good idea to always run
- using the ``--with-id`` option. This will ensure that an id file is always
- created, allowing you to add ``--failed`` to the command line as soon as
- you have failing tests. Otherwise, your first run using ``--failed`` will
- (perhaps surprisingly) run *all* tests, because there won't be an id file
- containing the record of failed tests from your previous run.
-
-"""
-__test__ = False
-
-import logging
-import os
-from nose.plugins import Plugin
-from nose.util import src, set
-
-try:
- from cPickle import dump, load
-except ImportError:
- from pickle import dump, load
-
-log = logging.getLogger(__name__)
-
-
-class TestId(Plugin):
- """
- Activate to add a test id (like #1) to each test name output. Activate
- with --failed to rerun failing tests only.
- """
- name = 'id'
- idfile = None
- collecting = True
- loopOnFailed = False
-
- def options(self, parser, env):
- """Register commandline options.
- """
- Plugin.options(self, parser, env)
- parser.add_option('--id-file', action='store', dest='testIdFile',
- default='.noseids', metavar="FILE",
- help="Store test ids found in test runs in this "
- "file. Default is the file .noseids in the "
- "working directory.")
- parser.add_option('--failed', action='store_true',
- dest='failed', default=False,
- help="Run the tests that failed in the last "
- "test run.")
-
- def configure(self, options, conf):
- """Configure plugin.
- """
- Plugin.configure(self, options, conf)
- if options.failed:
- self.enabled = True
- self.loopOnFailed = True
- log.debug("Looping on failed tests")
- self.idfile = os.path.expanduser(options.testIdFile)
- if not os.path.isabs(self.idfile):
- self.idfile = os.path.join(conf.workingDir, self.idfile)
- self.id = 1
- # Ids and tests are mirror images: ids are {id: test address} and
- # tests are {test address: id}
- self.ids = {}
- self.tests = {}
- self.failed = []
- self.source_names = []
- # used to track ids seen when tests is filled from
- # loaded ids file
- self._seen = {}
- self._write_hashes = conf.verbosity >= 2
-
- def finalize(self, result):
- """Save new ids file, if needed.
- """
- if result.wasSuccessful():
- self.failed = []
- if self.collecting:
- ids = dict(list(zip(list(self.tests.values()), list(self.tests.keys()))))
- else:
- ids = self.ids
- fh = open(self.idfile, 'wb')
- dump({'ids': ids,
- 'failed': self.failed,
- 'source_names': self.source_names}, fh)
- fh.close()
- log.debug('Saved test ids: %s, failed %s to %s',
- ids, self.failed, self.idfile)
-
- def loadTestsFromNames(self, names, module=None):
- """Translate ids in the list of requested names into their
- test addresses, if they are found in my dict of tests.
- """
- log.debug('ltfn %s %s', names, module)
- try:
- fh = open(self.idfile, 'rb')
- data = load(fh)
- if 'ids' in data:
- self.ids = data['ids']
- self.failed = data['failed']
- self.source_names = data['source_names']
- else:
- # old ids field
- self.ids = data
- self.failed = []
- self.source_names = names
- if self.ids:
- self.id = max(self.ids) + 1
- self.tests = dict(list(zip(list(self.ids.values()), list(self.ids.keys()))))
- else:
- self.id = 1
- log.debug(
- 'Loaded test ids %s tests %s failed %s sources %s from %s',
- self.ids, self.tests, self.failed, self.source_names,
- self.idfile)
- fh.close()
- except ValueError, e:
- # load() may throw a ValueError when reading the ids file, if it
- # was generated with a newer version of Python than we are currently
- # running.
- log.debug('Error loading %s : %s', self.idfile, str(e))
- except IOError:
- log.debug('IO error reading %s', self.idfile)
-
- if self.loopOnFailed and self.failed:
- self.collecting = False
- names = self.failed
- self.failed = []
- # I don't load any tests myself, only translate names like '#2'
- # into the associated test addresses
- translated = []
- new_source = []
- really_new = []
- for name in names:
- trans = self.tr(name)
- if trans != name:
- translated.append(trans)
- else:
- new_source.append(name)
- # names that are not ids and that are not in the current
- # list of source names go into the list for next time
- if new_source:
- new_set = set(new_source)
- old_set = set(self.source_names)
- log.debug("old: %s new: %s", old_set, new_set)
- really_new = [s for s in new_source
- if not s in old_set]
- if really_new:
- # remember new sources
- self.source_names.extend(really_new)
- if not translated:
- # new set of source names, no translations
- # means "run the requested tests"
- names = new_source
- else:
- # no new names to translate and add to id set
- self.collecting = False
- log.debug("translated: %s new sources %s names %s",
- translated, really_new, names)
- return (None, translated + really_new or names)
-
- def makeName(self, addr):
- log.debug("Make name %s", addr)
- filename, module, call = addr
- if filename is not None:
- head = src(filename)
- else:
- head = module
- if call is not None:
- return "%s:%s" % (head, call)
- return head
-
- def setOutputStream(self, stream):
- """Get handle on output stream so the plugin can print id #s
- """
- self.stream = stream
-
- def startTest(self, test):
- """Maybe output an id # before the test name.
-
- Example output::
-
- #1 test.test ... ok
- #2 test.test_two ... ok
-
- """
- adr = test.address()
- log.debug('start test %s (%s)', adr, adr in self.tests)
- if adr in self.tests:
- if adr in self._seen:
- self.write(' ')
- else:
- self.write('#%s ' % self.tests[adr])
- self._seen[adr] = 1
- return
- self.tests[adr] = self.id
- self.write('#%s ' % self.id)
- self.id += 1
-
- def afterTest(self, test):
- # None means test never ran, False means failed/err
- if test.passed is False:
- try:
- key = str(self.tests[test.address()])
- except KeyError:
- # never saw this test -- startTest didn't run
- pass
- else:
- if key not in self.failed:
- self.failed.append(key)
-
- def tr(self, name):
- log.debug("tr '%s'", name)
- try:
- key = int(name.replace('#', ''))
- except ValueError:
- return name
- log.debug("Got key %s", key)
- # I'm running tests mapped from the ids file,
- # not collecting new ones
- if key in self.ids:
- return self.makeName(self.ids[key])
- return name
-
- def write(self, output):
- if self._write_hashes:
- self.stream.write(output)
diff --git a/lib/spack/external/nose/plugins/xunit.py b/lib/spack/external/nose/plugins/xunit.py
deleted file mode 100644
index 90b52f5f61..0000000000
--- a/lib/spack/external/nose/plugins/xunit.py
+++ /dev/null
@@ -1,341 +0,0 @@
-"""This plugin provides test results in the standard XUnit XML format.
-
-It's designed for the `Jenkins`_ (previously Hudson) continuous build
-system, but will probably work for anything else that understands an
-XUnit-formatted XML representation of test results.
-
-Add this shell command to your builder ::
-
- nosetests --with-xunit
-
-And by default a file named nosetests.xml will be written to the
-working directory.
-
-In a Jenkins builder, tick the box named "Publish JUnit test result report"
-under the Post-build Actions and enter this value for Test report XMLs::
-
- **/nosetests.xml
-
-If you need to change the name or location of the file, you can set the
-``--xunit-file`` option.
-
-If you need to change the name of the test suite, you can set the
-``--xunit-testsuite-name`` option.
-
-Here is an abbreviated version of what an XML test report might look like::
-
- <?xml version="1.0" encoding="UTF-8"?>
- <testsuite name="nosetests" tests="1" errors="1" failures="0" skip="0">
- <testcase classname="path_to_test_suite.TestSomething"
- name="test_it" time="0">
- <error type="exceptions.TypeError" message="oops, wrong type">
- Traceback (most recent call last):
- ...
- TypeError: oops, wrong type
- </error>
- </testcase>
- </testsuite>
-
-.. _Jenkins: http://jenkins-ci.org/
-
-"""
-import codecs
-import doctest
-import os
-import sys
-import traceback
-import re
-import inspect
-from StringIO import StringIO
-from time import time
-from xml.sax import saxutils
-
-from nose.plugins.base import Plugin
-from nose.exc import SkipTest
-from nose.pyversion import force_unicode, format_exception
-
-# Invalid XML characters, control characters 0-31 sans \t, \n and \r
-CONTROL_CHARACTERS = re.compile(r"[\000-\010\013\014\016-\037]")
-
-TEST_ID = re.compile(r'^(.*?)(\(.*\))$')
-
-def xml_safe(value):
- """Replaces invalid XML characters with '?'."""
- return CONTROL_CHARACTERS.sub('?', value)
-
-def escape_cdata(cdata):
- """Escape a string for an XML CDATA section."""
- return xml_safe(cdata).replace(']]>', ']]>]]&gt;<![CDATA[')
-
-def id_split(idval):
- m = TEST_ID.match(idval)
- if m:
- name, fargs = m.groups()
- head, tail = name.rsplit(".", 1)
- return [head, tail+fargs]
- else:
- return idval.rsplit(".", 1)
-
-def nice_classname(obj):
- """Returns a nice name for class object or class instance.
-
- >>> nice_classname(Exception()) # doctest: +ELLIPSIS
- '...Exception'
- >>> nice_classname(Exception) # doctest: +ELLIPSIS
- '...Exception'
-
- """
- if inspect.isclass(obj):
- cls_name = obj.__name__
- else:
- cls_name = obj.__class__.__name__
- mod = inspect.getmodule(obj)
- if mod:
- name = mod.__name__
- # jython
- if name.startswith('org.python.core.'):
- name = name[len('org.python.core.'):]
- return "%s.%s" % (name, cls_name)
- else:
- return cls_name
-
-def exc_message(exc_info):
- """Return the exception's message."""
- exc = exc_info[1]
- if exc is None:
- # str exception
- result = exc_info[0]
- else:
- try:
- result = str(exc)
- except UnicodeEncodeError:
- try:
- result = unicode(exc)
- except UnicodeError:
- # Fallback to args as neither str nor
- # unicode(Exception(u'\xe6')) work in Python < 2.6
- result = exc.args[0]
- result = force_unicode(result, 'UTF-8')
- return xml_safe(result)
-
-class Tee(object):
- def __init__(self, encoding, *args):
- self._encoding = encoding
- self._streams = args
-
- def write(self, data):
- data = force_unicode(data, self._encoding)
- for s in self._streams:
- s.write(data)
-
- def writelines(self, lines):
- for line in lines:
- self.write(line)
-
- def flush(self):
- for s in self._streams:
- s.flush()
-
- def isatty(self):
- return False
-
-
-class Xunit(Plugin):
- """This plugin provides test results in the standard XUnit XML format."""
- name = 'xunit'
- score = 1500
- encoding = 'UTF-8'
- error_report_file = None
-
- def __init__(self):
- super(Xunit, self).__init__()
- self._capture_stack = []
- self._currentStdout = None
- self._currentStderr = None
-
- def _timeTaken(self):
- if hasattr(self, '_timer'):
- taken = time() - self._timer
- else:
- # test died before it ran (probably error in setup())
- # or success/failure added before test started probably
- # due to custom TestResult munging
- taken = 0.0
- return taken
-
- def _quoteattr(self, attr):
- """Escape an XML attribute. Value can be unicode."""
- attr = xml_safe(attr)
- return saxutils.quoteattr(attr)
-
- def options(self, parser, env):
- """Sets additional command line options."""
- Plugin.options(self, parser, env)
- parser.add_option(
- '--xunit-file', action='store',
- dest='xunit_file', metavar="FILE",
- default=env.get('NOSE_XUNIT_FILE', 'nosetests.xml'),
- help=("Path to xml file to store the xunit report in. "
- "Default is nosetests.xml in the working directory "
- "[NOSE_XUNIT_FILE]"))
-
- parser.add_option(
- '--xunit-testsuite-name', action='store',
- dest='xunit_testsuite_name', metavar="PACKAGE",
- default=env.get('NOSE_XUNIT_TESTSUITE_NAME', 'nosetests'),
- help=("Name of the testsuite in the xunit xml, generated by plugin. "
- "Default test suite name is nosetests."))
-
- def configure(self, options, config):
- """Configures the xunit plugin."""
- Plugin.configure(self, options, config)
- self.config = config
- if self.enabled:
- self.stats = {'errors': 0,
- 'failures': 0,
- 'passes': 0,
- 'skipped': 0
- }
- self.errorlist = []
- self.error_report_file_name = os.path.realpath(options.xunit_file)
- self.xunit_testsuite_name = options.xunit_testsuite_name
-
- def report(self, stream):
- """Writes an Xunit-formatted XML file
-
- The file includes a report of test errors and failures.
-
- """
- self.error_report_file = codecs.open(self.error_report_file_name, 'w',
- self.encoding, 'replace')
- self.stats['encoding'] = self.encoding
- self.stats['testsuite_name'] = self.xunit_testsuite_name
- self.stats['total'] = (self.stats['errors'] + self.stats['failures']
- + self.stats['passes'] + self.stats['skipped'])
- self.error_report_file.write(
- u'<?xml version="1.0" encoding="%(encoding)s"?>'
- u'<testsuite name="%(testsuite_name)s" tests="%(total)d" '
- u'errors="%(errors)d" failures="%(failures)d" '
- u'skip="%(skipped)d">' % self.stats)
- self.error_report_file.write(u''.join([force_unicode(e, self.encoding)
- for e in self.errorlist]))
- self.error_report_file.write(u'</testsuite>')
- self.error_report_file.close()
- if self.config.verbosity > 1:
- stream.writeln("-" * 70)
- stream.writeln("XML: %s" % self.error_report_file.name)
-
- def _startCapture(self):
- self._capture_stack.append((sys.stdout, sys.stderr))
- self._currentStdout = StringIO()
- self._currentStderr = StringIO()
- sys.stdout = Tee(self.encoding, self._currentStdout, sys.stdout)
- sys.stderr = Tee(self.encoding, self._currentStderr, sys.stderr)
-
- def startContext(self, context):
- self._startCapture()
-
- def stopContext(self, context):
- self._endCapture()
-
- def beforeTest(self, test):
- """Initializes a timer before starting a test."""
- self._timer = time()
- self._startCapture()
-
- def _endCapture(self):
- if self._capture_stack:
- sys.stdout, sys.stderr = self._capture_stack.pop()
-
- def afterTest(self, test):
- self._endCapture()
- self._currentStdout = None
- self._currentStderr = None
-
- def finalize(self, test):
- while self._capture_stack:
- self._endCapture()
-
- def _getCapturedStdout(self):
- if self._currentStdout:
- value = self._currentStdout.getvalue()
- if value:
- return '<system-out><![CDATA[%s]]></system-out>' % escape_cdata(
- value)
- return ''
-
- def _getCapturedStderr(self):
- if self._currentStderr:
- value = self._currentStderr.getvalue()
- if value:
- return '<system-err><![CDATA[%s]]></system-err>' % escape_cdata(
- value)
- return ''
-
- def addError(self, test, err, capt=None):
- """Add error output to Xunit report.
- """
- taken = self._timeTaken()
-
- if issubclass(err[0], SkipTest):
- type = 'skipped'
- self.stats['skipped'] += 1
- else:
- type = 'error'
- self.stats['errors'] += 1
-
- tb = format_exception(err, self.encoding)
- id = test.id()
-
- self.errorlist.append(
- u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
- u'<%(type)s type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
- u'</%(type)s>%(systemout)s%(systemerr)s</testcase>' %
- {'cls': self._quoteattr(id_split(id)[0]),
- 'name': self._quoteattr(id_split(id)[-1]),
- 'taken': taken,
- 'type': type,
- 'errtype': self._quoteattr(nice_classname(err[0])),
- 'message': self._quoteattr(exc_message(err)),
- 'tb': escape_cdata(tb),
- 'systemout': self._getCapturedStdout(),
- 'systemerr': self._getCapturedStderr(),
- })
-
- def addFailure(self, test, err, capt=None, tb_info=None):
- """Add failure output to Xunit report.
- """
- taken = self._timeTaken()
- tb = format_exception(err, self.encoding)
- self.stats['failures'] += 1
- id = test.id()
-
- self.errorlist.append(
- u'<testcase classname=%(cls)s name=%(name)s time="%(taken).3f">'
- u'<failure type=%(errtype)s message=%(message)s><![CDATA[%(tb)s]]>'
- u'</failure>%(systemout)s%(systemerr)s</testcase>' %
- {'cls': self._quoteattr(id_split(id)[0]),
- 'name': self._quoteattr(id_split(id)[-1]),
- 'taken': taken,
- 'errtype': self._quoteattr(nice_classname(err[0])),
- 'message': self._quoteattr(exc_message(err)),
- 'tb': escape_cdata(tb),
- 'systemout': self._getCapturedStdout(),
- 'systemerr': self._getCapturedStderr(),
- })
-
- def addSuccess(self, test, capt=None):
- """Add success output to Xunit report.
- """
- taken = self._timeTaken()
- self.stats['passes'] += 1
- id = test.id()
- self.errorlist.append(
- '<testcase classname=%(cls)s name=%(name)s '
- 'time="%(taken).3f">%(systemout)s%(systemerr)s</testcase>' %
- {'cls': self._quoteattr(id_split(id)[0]),
- 'name': self._quoteattr(id_split(id)[-1]),
- 'taken': taken,
- 'systemout': self._getCapturedStdout(),
- 'systemerr': self._getCapturedStderr(),
- })
diff --git a/lib/spack/external/nose/proxy.py b/lib/spack/external/nose/proxy.py
deleted file mode 100644
index c2676cb195..0000000000
--- a/lib/spack/external/nose/proxy.py
+++ /dev/null
@@ -1,188 +0,0 @@
-"""
-Result Proxy
-------------
-
-The result proxy wraps the result instance given to each test. It
-performs two functions: enabling extended error/failure reporting
-and calling plugins.
-
-As each result event is fired, plugins are called with the same event;
-however, plugins are called with the nose.case.Test instance that
-wraps the actual test. So when a test fails and calls
-result.addFailure(self, err), the result proxy calls
-addFailure(self.test, err) for each plugin. This allows plugins to
-have a single stable interface for all test types, and also to
-manipulate the test object itself by setting the `test` attribute of
-the nose.case.Test that they receive.
-"""
-import logging
-from nose.config import Config
-
-
-log = logging.getLogger(__name__)
-
-
-def proxied_attribute(local_attr, proxied_attr, doc):
- """Create a property that proxies attribute ``proxied_attr`` through
- the local attribute ``local_attr``.
- """
- def fget(self):
- return getattr(getattr(self, local_attr), proxied_attr)
- def fset(self, value):
- setattr(getattr(self, local_attr), proxied_attr, value)
- def fdel(self):
- delattr(getattr(self, local_attr), proxied_attr)
- return property(fget, fset, fdel, doc)
-
-
-class ResultProxyFactory(object):
- """Factory for result proxies. Generates a ResultProxy bound to each test
- and the result passed to the test.
- """
- def __init__(self, config=None):
- if config is None:
- config = Config()
- self.config = config
- self.__prepared = False
- self.__result = None
-
- def __call__(self, result, test):
- """Return a ResultProxy for the current test.
-
- On first call, plugins are given a chance to replace the
- result used for the remaining tests. If a plugin returns a
- value from prepareTestResult, that object will be used as the
- result for all tests.
- """
- if not self.__prepared:
- self.__prepared = True
- plug_result = self.config.plugins.prepareTestResult(result)
- if plug_result is not None:
- self.__result = result = plug_result
- if self.__result is not None:
- result = self.__result
- return ResultProxy(result, test, config=self.config)
-
-
-class ResultProxy(object):
- """Proxy to TestResults (or other results handler).
-
- One ResultProxy is created for each nose.case.Test. The result
- proxy calls plugins with the nose.case.Test instance (instead of
- the wrapped test case) as each result call is made. Finally, the
- real result method is called, also with the nose.case.Test
- instance as the test parameter.
-
- """
- def __init__(self, result, test, config=None):
- if config is None:
- config = Config()
- self.config = config
- self.plugins = config.plugins
- self.result = result
- self.test = test
-
- def __repr__(self):
- return repr(self.result)
-
- def _prepareErr(self, err):
- if not isinstance(err[1], Exception) and isinstance(err[0], type):
- # Turn value back into an Exception (required in Python 3.x).
- # Plugins do all sorts of crazy things with exception values.
- # Convert it to a custom subclass of Exception with the same
- # name as the actual exception to make it print correctly.
- value = type(err[0].__name__, (Exception,), {})(err[1])
- err = (err[0], value, err[2])
- return err
-
- def assertMyTest(self, test):
- # The test I was called with must be my .test or my
- # .test's .test. or my .test.test's .case
-
- case = getattr(self.test, 'test', None)
- assert (test is self.test
- or test is case
- or test is getattr(case, '_nose_case', None)), (
- "ResultProxy for %r (%s) was called with test %r (%s)"
- % (self.test, id(self.test), test, id(test)))
-
- def afterTest(self, test):
- self.assertMyTest(test)
- self.plugins.afterTest(self.test)
- if hasattr(self.result, "afterTest"):
- self.result.afterTest(self.test)
-
- def beforeTest(self, test):
- self.assertMyTest(test)
- self.plugins.beforeTest(self.test)
- if hasattr(self.result, "beforeTest"):
- self.result.beforeTest(self.test)
-
- def addError(self, test, err):
- self.assertMyTest(test)
- plugins = self.plugins
- plugin_handled = plugins.handleError(self.test, err)
- if plugin_handled:
- return
- # test.passed is set in result, to account for error classes
- formatted = plugins.formatError(self.test, err)
- if formatted is not None:
- err = formatted
- plugins.addError(self.test, err)
- self.result.addError(self.test, self._prepareErr(err))
- if not self.result.wasSuccessful() and self.config.stopOnError:
- self.shouldStop = True
-
- def addFailure(self, test, err):
- self.assertMyTest(test)
- plugins = self.plugins
- plugin_handled = plugins.handleFailure(self.test, err)
- if plugin_handled:
- return
- self.test.passed = False
- formatted = plugins.formatFailure(self.test, err)
- if formatted is not None:
- err = formatted
- plugins.addFailure(self.test, err)
- self.result.addFailure(self.test, self._prepareErr(err))
- if self.config.stopOnError:
- self.shouldStop = True
-
- def addSkip(self, test, reason):
- # 2.7 compat shim
- from nose.plugins.skip import SkipTest
- self.assertMyTest(test)
- plugins = self.plugins
- if not isinstance(reason, Exception):
- # for Python 3.2+
- reason = Exception(reason)
- plugins.addError(self.test, (SkipTest, reason, None))
- self.result.addSkip(self.test, reason)
-
- def addSuccess(self, test):
- self.assertMyTest(test)
- self.plugins.addSuccess(self.test)
- self.result.addSuccess(self.test)
-
- def startTest(self, test):
- self.assertMyTest(test)
- self.plugins.startTest(self.test)
- self.result.startTest(self.test)
-
- def stop(self):
- self.result.stop()
-
- def stopTest(self, test):
- self.assertMyTest(test)
- self.plugins.stopTest(self.test)
- self.result.stopTest(self.test)
-
- # proxied attributes
- shouldStop = proxied_attribute('result', 'shouldStop',
- """Should the test run stop?""")
- errors = proxied_attribute('result', 'errors',
- """Tests that raised an exception""")
- failures = proxied_attribute('result', 'failures',
- """Tests that failed""")
- testsRun = proxied_attribute('result', 'testsRun',
- """Number of tests run""")
diff --git a/lib/spack/external/nose/pyversion.py b/lib/spack/external/nose/pyversion.py
deleted file mode 100644
index 091238da75..0000000000
--- a/lib/spack/external/nose/pyversion.py
+++ /dev/null
@@ -1,215 +0,0 @@
-"""
-This module contains fixups for using nose under different versions of Python.
-"""
-import sys
-import os
-import traceback
-import types
-import inspect
-import nose.util
-
-__all__ = ['make_instancemethod', 'cmp_to_key', 'sort_list', 'ClassType',
- 'TypeType', 'UNICODE_STRINGS', 'unbound_method', 'ismethod',
- 'bytes_', 'is_base_exception', 'force_unicode', 'exc_to_unicode',
- 'format_exception']
-
-# In Python 3.x, all strings are unicode (the call to 'unicode()' in the 2.x
-# source will be replaced with 'str()' when running 2to3, so this test will
-# then become true)
-UNICODE_STRINGS = (type(unicode()) == type(str()))
-
-if sys.version_info[:2] < (3, 0):
- def force_unicode(s, encoding='UTF-8'):
- try:
- s = unicode(s)
- except UnicodeDecodeError:
- s = str(s).decode(encoding, 'replace')
-
- return s
-else:
- def force_unicode(s, encoding='UTF-8'):
- return str(s)
-
-# new.instancemethod() is obsolete for new-style classes (Python 3.x)
-# We need to use descriptor methods instead.
-try:
- import new
- def make_instancemethod(function, instance):
- return new.instancemethod(function.im_func, instance,
- instance.__class__)
-except ImportError:
- def make_instancemethod(function, instance):
- return function.__get__(instance, instance.__class__)
-
-# To be forward-compatible, we do all list sorts using keys instead of cmp
-# functions. However, part of the unittest.TestLoader API involves a
-# user-provideable cmp function, so we need some way to convert that.
-def cmp_to_key(mycmp):
- 'Convert a cmp= function into a key= function'
- class Key(object):
- def __init__(self, obj):
- self.obj = obj
- def __lt__(self, other):
- return mycmp(self.obj, other.obj) < 0
- def __gt__(self, other):
- return mycmp(self.obj, other.obj) > 0
- def __eq__(self, other):
- return mycmp(self.obj, other.obj) == 0
- return Key
-
-# Python 2.3 also does not support list-sorting by key, so we need to convert
-# keys to cmp functions if we're running on old Python..
-if sys.version_info < (2, 4):
- def sort_list(l, key, reverse=False):
- if reverse:
- return l.sort(lambda a, b: cmp(key(b), key(a)))
- else:
- return l.sort(lambda a, b: cmp(key(a), key(b)))
-else:
- def sort_list(l, key, reverse=False):
- return l.sort(key=key, reverse=reverse)
-
-# In Python 3.x, all objects are "new style" objects descended from 'type', and
-# thus types.ClassType and types.TypeType don't exist anymore. For
-# compatibility, we make sure they still work.
-if hasattr(types, 'ClassType'):
- ClassType = types.ClassType
- TypeType = types.TypeType
-else:
- ClassType = type
- TypeType = type
-
-# The following emulates the behavior (we need) of an 'unbound method' under
-# Python 3.x (namely, the ability to have a class associated with a function
-# definition so that things can do stuff based on its associated class)
-class UnboundMethod:
- def __init__(self, cls, func):
- # Make sure we have all the same attributes as the original function,
- # so that the AttributeSelector plugin will work correctly...
- self.__dict__ = func.__dict__.copy()
- self._func = func
- self.__self__ = UnboundSelf(cls)
- if sys.version_info < (3, 0):
- self.im_class = cls
- self.__doc__ = getattr(func, '__doc__', None)
-
- def address(self):
- cls = self.__self__.cls
- modname = cls.__module__
- module = sys.modules[modname]
- filename = getattr(module, '__file__', None)
- if filename is not None:
- filename = os.path.abspath(filename)
- return (nose.util.src(filename), modname, "%s.%s" % (cls.__name__,
- self._func.__name__))
-
- def __call__(self, *args, **kwargs):
- return self._func(*args, **kwargs)
-
- def __getattr__(self, attr):
- return getattr(self._func, attr)
-
- def __repr__(self):
- return '<unbound method %s.%s>' % (self.__self__.cls.__name__,
- self._func.__name__)
-
-class UnboundSelf:
- def __init__(self, cls):
- self.cls = cls
-
- # We have to do this hackery because Python won't let us override the
- # __class__ attribute...
- def __getattribute__(self, attr):
- if attr == '__class__':
- return self.cls
- else:
- return object.__getattribute__(self, attr)
-
-def unbound_method(cls, func):
- if inspect.ismethod(func):
- return func
- if not inspect.isfunction(func):
- raise TypeError('%s is not a function' % (repr(func),))
- return UnboundMethod(cls, func)
-
-def ismethod(obj):
- return inspect.ismethod(obj) or isinstance(obj, UnboundMethod)
-
-
-# Make a pseudo-bytes function that can be called without the encoding arg:
-if sys.version_info >= (3, 0):
- def bytes_(s, encoding='utf8'):
- if isinstance(s, bytes):
- return s
- return bytes(s, encoding)
-else:
- def bytes_(s, encoding=None):
- return str(s)
-
-
-if sys.version_info[:2] >= (2, 6):
- def isgenerator(o):
- if isinstance(o, UnboundMethod):
- o = o._func
- return inspect.isgeneratorfunction(o) or inspect.isgenerator(o)
-else:
- try:
- from compiler.consts import CO_GENERATOR
- except ImportError:
- # IronPython doesn't have a complier module
- CO_GENERATOR=0x20
-
- def isgenerator(func):
- try:
- return func.func_code.co_flags & CO_GENERATOR != 0
- except AttributeError:
- return False
-
-# Make a function to help check if an exception is derived from BaseException.
-# In Python 2.4, we just use Exception instead.
-if sys.version_info[:2] < (2, 5):
- def is_base_exception(exc):
- return isinstance(exc, Exception)
-else:
- def is_base_exception(exc):
- return isinstance(exc, BaseException)
-
-if sys.version_info[:2] < (3, 0):
- def exc_to_unicode(ev, encoding='utf-8'):
- if is_base_exception(ev):
- if not hasattr(ev, '__unicode__'):
- # 2.5-
- if not hasattr(ev, 'message'):
- # 2.4
- msg = len(ev.args) and ev.args[0] or ''
- else:
- msg = ev.message
- msg = force_unicode(msg, encoding=encoding)
- clsname = force_unicode(ev.__class__.__name__,
- encoding=encoding)
- ev = u'%s: %s' % (clsname, msg)
- elif not isinstance(ev, unicode):
- ev = repr(ev)
-
- return force_unicode(ev, encoding=encoding)
-else:
- def exc_to_unicode(ev, encoding='utf-8'):
- return str(ev)
-
-def format_exception(exc_info, encoding='UTF-8'):
- ec, ev, tb = exc_info
-
- # Our exception object may have been turned into a string, and Python 3's
- # traceback.format_exception() doesn't take kindly to that (it expects an
- # actual exception object). So we work around it, by doing the work
- # ourselves if ev is not an exception object.
- if not is_base_exception(ev):
- tb_data = force_unicode(
- ''.join(traceback.format_tb(tb)),
- encoding)
- ev = exc_to_unicode(ev)
- return tb_data + ev
- else:
- return force_unicode(
- ''.join(traceback.format_exception(*exc_info)),
- encoding)
diff --git a/lib/spack/external/nose/result.py b/lib/spack/external/nose/result.py
deleted file mode 100644
index f974a14ae2..0000000000
--- a/lib/spack/external/nose/result.py
+++ /dev/null
@@ -1,200 +0,0 @@
-"""
-Test Result
------------
-
-Provides a TextTestResult that extends unittest's _TextTestResult to
-provide support for error classes (such as the builtin skip and
-deprecated classes), and hooks for plugins to take over or extend
-reporting.
-"""
-
-import logging
-try:
- # 2.7+
- from unittest.runner import _TextTestResult
-except ImportError:
- from unittest import _TextTestResult
-from nose.config import Config
-from nose.util import isclass, ln as _ln # backwards compat
-
-log = logging.getLogger('nose.result')
-
-
-def _exception_detail(exc):
- # this is what stdlib module traceback does
- try:
- return str(exc)
- except:
- return '<unprintable %s object>' % type(exc).__name__
-
-
-class TextTestResult(_TextTestResult):
- """Text test result that extends unittest's default test result
- support for a configurable set of errorClasses (eg, Skip,
- Deprecated, TODO) that extend the errors/failures/success triad.
- """
- def __init__(self, stream, descriptions, verbosity, config=None,
- errorClasses=None):
- if errorClasses is None:
- errorClasses = {}
- self.errorClasses = errorClasses
- if config is None:
- config = Config()
- self.config = config
- _TextTestResult.__init__(self, stream, descriptions, verbosity)
-
- def addSkip(self, test, reason):
- # 2.7 skip compat
- from nose.plugins.skip import SkipTest
- if SkipTest in self.errorClasses:
- storage, label, isfail = self.errorClasses[SkipTest]
- storage.append((test, reason))
- self.printLabel(label, (SkipTest, reason, None))
-
- def addError(self, test, err):
- """Overrides normal addError to add support for
- errorClasses. If the exception is a registered class, the
- error will be added to the list for that class, not errors.
- """
- ec, ev, tb = err
- try:
- exc_info = self._exc_info_to_string(err, test)
- except TypeError:
- # 2.3 compat
- exc_info = self._exc_info_to_string(err)
- for cls, (storage, label, isfail) in self.errorClasses.items():
- #if 'Skip' in cls.__name__ or 'Skip' in ec.__name__:
- # from nose.tools import set_trace
- # set_trace()
- if isclass(ec) and issubclass(ec, cls):
- if isfail:
- test.passed = False
- storage.append((test, exc_info))
- self.printLabel(label, err)
- return
- self.errors.append((test, exc_info))
- test.passed = False
- self.printLabel('ERROR')
-
- # override to bypass changes in 2.7
- def getDescription(self, test):
- if self.descriptions:
- return test.shortDescription() or str(test)
- else:
- return str(test)
-
- def printLabel(self, label, err=None):
- # Might get patched into a streamless result
- stream = getattr(self, 'stream', None)
- if stream is not None:
- if self.showAll:
- message = [label]
- if err:
- detail = _exception_detail(err[1])
- if detail:
- message.append(detail)
- stream.writeln(": ".join(message))
- elif self.dots:
- stream.write(label[:1])
-
- def printErrors(self):
- """Overrides to print all errorClasses errors as well.
- """
- _TextTestResult.printErrors(self)
- for cls in self.errorClasses.keys():
- storage, label, isfail = self.errorClasses[cls]
- if isfail:
- self.printErrorList(label, storage)
- # Might get patched into a result with no config
- if hasattr(self, 'config'):
- self.config.plugins.report(self.stream)
-
- def printSummary(self, start, stop):
- """Called by the test runner to print the final summary of test
- run results.
- """
- write = self.stream.write
- writeln = self.stream.writeln
- taken = float(stop - start)
- run = self.testsRun
- plural = run != 1 and "s" or ""
-
- writeln(self.separator2)
- writeln("Ran %s test%s in %.3fs" % (run, plural, taken))
- writeln()
-
- summary = {}
- eckeys = self.errorClasses.keys()
- for cls in eckeys:
- storage, label, isfail = self.errorClasses[cls]
- count = len(storage)
- if not count:
- continue
- summary[label] = count
- if len(self.failures):
- summary['failures'] = len(self.failures)
- if len(self.errors):
- summary['errors'] = len(self.errors)
-
- if not self.wasSuccessful():
- write("FAILED")
- else:
- write("OK")
- items = summary.items()
- if items:
- items.sort()
- write(" (")
- write(", ".join(["%s=%s" % (label, count) for
- label, count in items]))
- writeln(")")
- else:
- writeln()
-
- def wasSuccessful(self):
- """Overrides to check that there are no errors in errorClasses
- lists that are marked as errors and should cause a run to
- fail.
- """
- if self.errors or self.failures:
- return False
- for cls in self.errorClasses.keys():
- storage, label, isfail = self.errorClasses[cls]
- if not isfail:
- continue
- if storage:
- return False
- return True
-
- def _addError(self, test, err):
- try:
- exc_info = self._exc_info_to_string(err, test)
- except TypeError:
- # 2.3: does not take test arg
- exc_info = self._exc_info_to_string(err)
- self.errors.append((test, exc_info))
- if self.showAll:
- self.stream.write('ERROR')
- elif self.dots:
- self.stream.write('E')
-
- def _exc_info_to_string(self, err, test=None):
- # 2.7 skip compat
- from nose.plugins.skip import SkipTest
- if isclass(err[0]) and issubclass(err[0], SkipTest):
- return str(err[1])
- # 2.3/2.4 -- 2.4 passes test, 2.3 does not
- try:
- return _TextTestResult._exc_info_to_string(self, err, test)
- except TypeError:
- # 2.3: does not take test arg
- return _TextTestResult._exc_info_to_string(self, err)
-
-
-def ln(*arg, **kw):
- from warnings import warn
- warn("ln() has moved to nose.util from nose.result and will be removed "
- "from nose.result in a future release. Please update your imports ",
- DeprecationWarning)
- return _ln(*arg, **kw)
-
-
diff --git a/lib/spack/external/nose/selector.py b/lib/spack/external/nose/selector.py
deleted file mode 100644
index b63f7af0b1..0000000000
--- a/lib/spack/external/nose/selector.py
+++ /dev/null
@@ -1,251 +0,0 @@
-"""
-Test Selection
---------------
-
-Test selection is handled by a Selector. The test loader calls the
-appropriate selector method for each object it encounters that it
-thinks may be a test.
-"""
-import logging
-import os
-import unittest
-from nose.config import Config
-from nose.util import split_test_name, src, getfilename, getpackage, ispackage, is_executable
-
-log = logging.getLogger(__name__)
-
-__all__ = ['Selector', 'defaultSelector', 'TestAddress']
-
-
-# for efficiency and easier mocking
-op_join = os.path.join
-op_basename = os.path.basename
-op_exists = os.path.exists
-op_splitext = os.path.splitext
-op_isabs = os.path.isabs
-op_abspath = os.path.abspath
-
-
-class Selector(object):
- """Core test selector. Examines test candidates and determines whether,
- given the specified configuration, the test candidate should be selected
- as a test.
- """
- def __init__(self, config):
- if config is None:
- config = Config()
- self.configure(config)
-
- def configure(self, config):
- self.config = config
- self.exclude = config.exclude
- self.ignoreFiles = config.ignoreFiles
- self.include = config.include
- self.plugins = config.plugins
- self.match = config.testMatch
-
- def matches(self, name):
- """Does the name match my requirements?
-
- To match, a name must match config.testMatch OR config.include
- and it must not match config.exclude
- """
- return ((self.match.search(name)
- or (self.include and
- filter(None,
- [inc.search(name) for inc in self.include])))
- and ((not self.exclude)
- or not filter(None,
- [exc.search(name) for exc in self.exclude])
- ))
-
- def wantClass(self, cls):
- """Is the class a wanted test class?
-
- A class must be a unittest.TestCase subclass, or match test name
- requirements. Classes that start with _ are always excluded.
- """
- declared = getattr(cls, '__test__', None)
- if declared is not None:
- wanted = declared
- else:
- wanted = (not cls.__name__.startswith('_')
- and (issubclass(cls, unittest.TestCase)
- or self.matches(cls.__name__)))
-
- plug_wants = self.plugins.wantClass(cls)
- if plug_wants is not None:
- log.debug("Plugin setting selection of %s to %s", cls, plug_wants)
- wanted = plug_wants
- log.debug("wantClass %s? %s", cls, wanted)
- return wanted
-
- def wantDirectory(self, dirname):
- """Is the directory a wanted test directory?
-
- All package directories match, so long as they do not match exclude.
- All other directories must match test requirements.
- """
- tail = op_basename(dirname)
- if ispackage(dirname):
- wanted = (not self.exclude
- or not filter(None,
- [exc.search(tail) for exc in self.exclude]
- ))
- else:
- wanted = (self.matches(tail)
- or (self.config.srcDirs
- and tail in self.config.srcDirs))
- plug_wants = self.plugins.wantDirectory(dirname)
- if plug_wants is not None:
- log.debug("Plugin setting selection of %s to %s",
- dirname, plug_wants)
- wanted = plug_wants
- log.debug("wantDirectory %s? %s", dirname, wanted)
- return wanted
-
- def wantFile(self, file):
- """Is the file a wanted test file?
-
- The file must be a python source file and match testMatch or
- include, and not match exclude. Files that match ignore are *never*
- wanted, regardless of plugin, testMatch, include or exclude settings.
- """
- # never, ever load files that match anything in ignore
- # (.* _* and *setup*.py by default)
- base = op_basename(file)
- ignore_matches = [ ignore_this for ignore_this in self.ignoreFiles
- if ignore_this.search(base) ]
- if ignore_matches:
- log.debug('%s matches ignoreFiles pattern; skipped',
- base)
- return False
- if not self.config.includeExe and is_executable(file):
- log.info('%s is executable; skipped', file)
- return False
- dummy, ext = op_splitext(base)
- pysrc = ext == '.py'
-
- wanted = pysrc and self.matches(base)
- plug_wants = self.plugins.wantFile(file)
- if plug_wants is not None:
- log.debug("plugin setting want %s to %s", file, plug_wants)
- wanted = plug_wants
- log.debug("wantFile %s? %s", file, wanted)
- return wanted
-
- def wantFunction(self, function):
- """Is the function a test function?
- """
- try:
- if hasattr(function, 'compat_func_name'):
- funcname = function.compat_func_name
- else:
- funcname = function.__name__
- except AttributeError:
- # not a function
- return False
- declared = getattr(function, '__test__', None)
- if declared is not None:
- wanted = declared
- else:
- wanted = not funcname.startswith('_') and self.matches(funcname)
- plug_wants = self.plugins.wantFunction(function)
- if plug_wants is not None:
- wanted = plug_wants
- log.debug("wantFunction %s? %s", function, wanted)
- return wanted
-
- def wantMethod(self, method):
- """Is the method a test method?
- """
- try:
- method_name = method.__name__
- except AttributeError:
- # not a method
- return False
- if method_name.startswith('_'):
- # never collect 'private' methods
- return False
- declared = getattr(method, '__test__', None)
- if declared is not None:
- wanted = declared
- else:
- wanted = self.matches(method_name)
- plug_wants = self.plugins.wantMethod(method)
- if plug_wants is not None:
- wanted = plug_wants
- log.debug("wantMethod %s? %s", method, wanted)
- return wanted
-
- def wantModule(self, module):
- """Is the module a test module?
-
- The tail of the module name must match test requirements. One exception:
- we always want __main__.
- """
- declared = getattr(module, '__test__', None)
- if declared is not None:
- wanted = declared
- else:
- wanted = self.matches(module.__name__.split('.')[-1]) \
- or module.__name__ == '__main__'
- plug_wants = self.plugins.wantModule(module)
- if plug_wants is not None:
- wanted = plug_wants
- log.debug("wantModule %s? %s", module, wanted)
- return wanted
-
-defaultSelector = Selector
-
-
-class TestAddress(object):
- """A test address represents a user's request to run a particular
- test. The user may specify a filename or module (or neither),
- and/or a callable (a class, function, or method). The naming
- format for test addresses is:
-
- filename_or_module:callable
-
- Filenames that are not absolute will be made absolute relative to
- the working dir.
-
- The filename or module part will be considered a module name if it
- doesn't look like a file, that is, if it doesn't exist on the file
- system and it doesn't contain any directory separators and it
- doesn't end in .py.
-
- Callables may be a class name, function name, method name, or
- class.method specification.
- """
- def __init__(self, name, workingDir=None):
- if workingDir is None:
- workingDir = os.getcwd()
- self.name = name
- self.workingDir = workingDir
- self.filename, self.module, self.call = split_test_name(name)
- log.debug('Test name %s resolved to file %s, module %s, call %s',
- name, self.filename, self.module, self.call)
- if self.filename is None:
- if self.module is not None:
- self.filename = getfilename(self.module, self.workingDir)
- if self.filename:
- self.filename = src(self.filename)
- if not op_isabs(self.filename):
- self.filename = op_abspath(op_join(workingDir,
- self.filename))
- if self.module is None:
- self.module = getpackage(self.filename)
- log.debug(
- 'Final resolution of test name %s: file %s module %s call %s',
- name, self.filename, self.module, self.call)
-
- def totuple(self):
- return (self.filename, self.module, self.call)
-
- def __str__(self):
- return self.name
-
- def __repr__(self):
- return "%s: (%s, %s, %s)" % (self.name, self.filename,
- self.module, self.call)
diff --git a/lib/spack/external/nose/sphinx/__init__.py b/lib/spack/external/nose/sphinx/__init__.py
deleted file mode 100644
index 2ae28399f5..0000000000
--- a/lib/spack/external/nose/sphinx/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-pass
diff --git a/lib/spack/external/nose/sphinx/pluginopts.py b/lib/spack/external/nose/sphinx/pluginopts.py
deleted file mode 100644
index d2b284ab27..0000000000
--- a/lib/spack/external/nose/sphinx/pluginopts.py
+++ /dev/null
@@ -1,189 +0,0 @@
-"""
-Adds a sphinx directive that can be used to automatically document a plugin.
-
-this::
-
- .. autoplugin :: nose.plugins.foo
- :plugin: Pluggy
-
-produces::
-
- .. automodule :: nose.plugins.foo
-
- Options
- -------
-
- .. cmdoption :: --foo=BAR, --fooble=BAR
-
- Do the foo thing to the new thing.
-
- Plugin
- ------
-
- .. autoclass :: nose.plugins.foo.Pluggy
- :members:
-
- Source
- ------
-
- .. include :: path/to/nose/plugins/foo.py
- :literal:
-
-"""
-import os
-try:
- from docutils import nodes, utils
- from docutils.statemachine import ViewList
- from docutils.parsers.rst import directives
-except ImportError:
- pass # won't run anyway
-
-from nose.util import resolve_name
-from nose.plugins.base import Plugin
-from nose.plugins.manager import BuiltinPluginManager
-from nose.config import Config
-from nose.core import TestProgram
-from inspect import isclass
-
-
-def autoplugin_directive(dirname, arguments, options, content, lineno,
- content_offset, block_text, state, state_machine):
- mod_name = arguments[0]
- mod = resolve_name(mod_name)
- plug_name = options.get('plugin', None)
- if plug_name:
- obj = getattr(mod, plug_name)
- else:
- for entry in dir(mod):
- obj = getattr(mod, entry)
- if isclass(obj) and issubclass(obj, Plugin) and obj is not Plugin:
- plug_name = '%s.%s' % (mod_name, entry)
- break
-
- # mod docstring
- rst = ViewList()
- rst.append('.. automodule :: %s\n' % mod_name, '<autodoc>')
- rst.append('', '<autodoc>')
-
- # options
- rst.append('Options', '<autodoc>')
- rst.append('-------', '<autodoc>')
- rst.append('', '<autodoc>')
-
- plug = obj()
- opts = OptBucket()
- plug.options(opts, {})
- for opt in opts:
- rst.append(opt.options(), '<autodoc>')
- rst.append(' \n', '<autodoc>')
- rst.append(' ' + opt.help + '\n', '<autodoc>')
- rst.append('\n', '<autodoc>')
-
- # plugin class
- rst.append('Plugin', '<autodoc>')
- rst.append('------', '<autodoc>')
- rst.append('', '<autodoc>')
-
- rst.append('.. autoclass :: %s\n' % plug_name, '<autodoc>')
- rst.append(' :members:\n', '<autodoc>')
- rst.append(' :show-inheritance:\n', '<autodoc>')
- rst.append('', '<autodoc>')
-
- # source
- rst.append('Source', '<autodoc>')
- rst.append('------', '<autodoc>')
- rst.append(
- '.. include :: %s\n' % utils.relative_path(
- state_machine.document['source'],
- os.path.abspath(mod.__file__.replace('.pyc', '.py'))),
- '<autodoc>')
- rst.append(' :literal:\n', '<autodoc>')
- rst.append('', '<autodoc>')
-
- node = nodes.section()
- node.document = state.document
- surrounding_title_styles = state.memo.title_styles
- surrounding_section_level = state.memo.section_level
- state.memo.title_styles = []
- state.memo.section_level = 0
- state.nested_parse(rst, 0, node, match_titles=1)
- state.memo.title_styles = surrounding_title_styles
- state.memo.section_level = surrounding_section_level
-
- return node.children
-
-
-def autohelp_directive(dirname, arguments, options, content, lineno,
- content_offset, block_text, state, state_machine):
- """produces rst from nose help"""
- config = Config(parserClass=OptBucket,
- plugins=BuiltinPluginManager())
- parser = config.getParser(TestProgram.usage())
- rst = ViewList()
- for line in parser.format_help().split('\n'):
- rst.append(line, '<autodoc>')
-
- rst.append('Options', '<autodoc>')
- rst.append('-------', '<autodoc>')
- rst.append('', '<autodoc>')
- for opt in parser:
- rst.append(opt.options(), '<autodoc>')
- rst.append(' \n', '<autodoc>')
- rst.append(' ' + opt.help + '\n', '<autodoc>')
- rst.append('\n', '<autodoc>')
- node = nodes.section()
- node.document = state.document
- surrounding_title_styles = state.memo.title_styles
- surrounding_section_level = state.memo.section_level
- state.memo.title_styles = []
- state.memo.section_level = 0
- state.nested_parse(rst, 0, node, match_titles=1)
- state.memo.title_styles = surrounding_title_styles
- state.memo.section_level = surrounding_section_level
-
- return node.children
-
-
-class OptBucket(object):
- def __init__(self, doc=None, prog='nosetests'):
- self.opts = []
- self.doc = doc
- self.prog = prog
-
- def __iter__(self):
- return iter(self.opts)
-
- def format_help(self):
- return self.doc.replace('%prog', self.prog).replace(':\n', '::\n')
-
- def add_option(self, *arg, **kw):
- self.opts.append(Opt(*arg, **kw))
-
-
-class Opt(object):
- def __init__(self, *arg, **kw):
- self.opts = arg
- self.action = kw.pop('action', None)
- self.default = kw.pop('default', None)
- self.metavar = kw.pop('metavar', None)
- self.help = kw.pop('help', None)
-
- def options(self):
- buf = []
- for optstring in self.opts:
- desc = optstring
- if self.action not in ('store_true', 'store_false'):
- desc += '=%s' % self.meta(optstring)
- buf.append(desc)
- return '.. cmdoption :: ' + ', '.join(buf)
-
- def meta(self, optstring):
- # FIXME optparser default metavar?
- return self.metavar or 'DEFAULT'
-
-
-def setup(app):
- app.add_directive('autoplugin',
- autoplugin_directive, 1, (1, 0, 1),
- plugin=directives.unchanged)
- app.add_directive('autohelp', autohelp_directive, 0, (0, 0, 1))
diff --git a/lib/spack/external/nose/suite.py b/lib/spack/external/nose/suite.py
deleted file mode 100644
index a831105e34..0000000000
--- a/lib/spack/external/nose/suite.py
+++ /dev/null
@@ -1,609 +0,0 @@
-"""
-Test Suites
------------
-
-Provides a LazySuite, which is a suite whose test list is a generator
-function, and ContextSuite,which can run fixtures (setup/teardown
-functions or methods) for the context that contains its tests.
-
-"""
-from __future__ import generators
-
-import logging
-import sys
-import unittest
-from nose.case import Test
-from nose.config import Config
-from nose.proxy import ResultProxyFactory
-from nose.util import isclass, resolve_name, try_run
-
-if sys.platform == 'cli':
- if sys.version_info[:2] < (2, 6):
- import clr
- clr.AddReference("IronPython")
- from IronPython.Runtime.Exceptions import StringException
- else:
- class StringException(Exception):
- pass
-
-log = logging.getLogger(__name__)
-#log.setLevel(logging.DEBUG)
-
-# Singleton for default value -- see ContextSuite.__init__ below
-_def = object()
-
-
-def _strclass(cls):
- return "%s.%s" % (cls.__module__, cls.__name__)
-
-class MixedContextError(Exception):
- """Error raised when a context suite sees tests from more than
- one context.
- """
- pass
-
-
-class LazySuite(unittest.TestSuite):
- """A suite that may use a generator as its list of tests
- """
- def __init__(self, tests=()):
- """Initialize the suite. tests may be an iterable or a generator
- """
- super(LazySuite, self).__init__()
- self._set_tests(tests)
-
- def __iter__(self):
- return iter(self._tests)
-
- def __repr__(self):
- return "<%s tests=generator (%s)>" % (
- _strclass(self.__class__), id(self))
-
- def __hash__(self):
- return object.__hash__(self)
-
- __str__ = __repr__
-
- def addTest(self, test):
- self._precache.append(test)
-
- # added to bypass run changes in 2.7's unittest
- def run(self, result):
- for test in self._tests:
- if result.shouldStop:
- break
- test(result)
- return result
-
- def __nonzero__(self):
- log.debug("tests in %s?", id(self))
- if self._precache:
- return True
- if self.test_generator is None:
- return False
- try:
- test = self.test_generator.next()
- if test is not None:
- self._precache.append(test)
- return True
- except StopIteration:
- pass
- return False
-
- def _get_tests(self):
- log.debug("precache is %s", self._precache)
- for test in self._precache:
- yield test
- if self.test_generator is None:
- return
- for test in self.test_generator:
- yield test
-
- def _set_tests(self, tests):
- self._precache = []
- is_suite = isinstance(tests, unittest.TestSuite)
- if callable(tests) and not is_suite:
- self.test_generator = tests()
- elif is_suite:
- # Suites need special treatment: they must be called like
- # tests for their setup/teardown to run (if any)
- self.addTests([tests])
- self.test_generator = None
- else:
- self.addTests(tests)
- self.test_generator = None
-
- _tests = property(_get_tests, _set_tests, None,
- "Access the tests in this suite. Access is through a "
- "generator, so iteration may not be repeatable.")
-
-
-class ContextSuite(LazySuite):
- """A suite with context.
-
- A ContextSuite executes fixtures (setup and teardown functions or
- methods) for the context containing its tests.
-
- The context may be explicitly passed. If it is not, a context (or
- nested set of contexts) will be constructed by examining the tests
- in the suite.
- """
- failureException = unittest.TestCase.failureException
- was_setup = False
- was_torndown = False
- classSetup = ('setup_class', 'setup_all', 'setupClass', 'setupAll',
- 'setUpClass', 'setUpAll')
- classTeardown = ('teardown_class', 'teardown_all', 'teardownClass',
- 'teardownAll', 'tearDownClass', 'tearDownAll')
- moduleSetup = ('setup_module', 'setupModule', 'setUpModule', 'setup',
- 'setUp')
- moduleTeardown = ('teardown_module', 'teardownModule', 'tearDownModule',
- 'teardown', 'tearDown')
- packageSetup = ('setup_package', 'setupPackage', 'setUpPackage')
- packageTeardown = ('teardown_package', 'teardownPackage',
- 'tearDownPackage')
-
- def __init__(self, tests=(), context=None, factory=None,
- config=None, resultProxy=None, can_split=True):
- log.debug("Context suite for %s (%s) (%s)", tests, context, id(self))
- self.context = context
- self.factory = factory
- if config is None:
- config = Config()
- self.config = config
- self.resultProxy = resultProxy
- self.has_run = False
- self.can_split = can_split
- self.error_context = None
- super(ContextSuite, self).__init__(tests)
-
- def __repr__(self):
- return "<%s context=%s>" % (
- _strclass(self.__class__),
- getattr(self.context, '__name__', self.context))
- __str__ = __repr__
-
- def id(self):
- if self.error_context:
- return '%s:%s' % (repr(self), self.error_context)
- else:
- return repr(self)
-
- def __hash__(self):
- return object.__hash__(self)
-
- # 2.3 compat -- force 2.4 call sequence
- def __call__(self, *arg, **kw):
- return self.run(*arg, **kw)
-
- def exc_info(self):
- """Hook for replacing error tuple output
- """
- return sys.exc_info()
-
- def _exc_info(self):
- """Bottleneck to fix up IronPython string exceptions
- """
- e = self.exc_info()
- if sys.platform == 'cli':
- if isinstance(e[0], StringException):
- # IronPython throws these StringExceptions, but
- # traceback checks type(etype) == str. Make a real
- # string here.
- e = (str(e[0]), e[1], e[2])
-
- return e
-
- def run(self, result):
- """Run tests in suite inside of suite fixtures.
- """
- # proxy the result for myself
- log.debug("suite %s (%s) run called, tests: %s", id(self), self, self._tests)
- #import pdb
- #pdb.set_trace()
- if self.resultProxy:
- result, orig = self.resultProxy(result, self), result
- else:
- result, orig = result, result
- try:
- self.setUp()
- except KeyboardInterrupt:
- raise
- except:
- self.error_context = 'setup'
- result.addError(self, self._exc_info())
- return
- try:
- for test in self._tests:
- if result.shouldStop:
- log.debug("stopping")
- break
- # each nose.case.Test will create its own result proxy
- # so the cases need the original result, to avoid proxy
- # chains
- test(orig)
- finally:
- self.has_run = True
- try:
- self.tearDown()
- except KeyboardInterrupt:
- raise
- except:
- self.error_context = 'teardown'
- result.addError(self, self._exc_info())
-
- def hasFixtures(self, ctx_callback=None):
- context = self.context
- if context is None:
- return False
- if self.implementsAnyFixture(context, ctx_callback=ctx_callback):
- return True
- # My context doesn't have any, but its ancestors might
- factory = self.factory
- if factory:
- ancestors = factory.context.get(self, [])
- for ancestor in ancestors:
- if self.implementsAnyFixture(
- ancestor, ctx_callback=ctx_callback):
- return True
- return False
-
- def implementsAnyFixture(self, context, ctx_callback):
- if isclass(context):
- names = self.classSetup + self.classTeardown
- else:
- names = self.moduleSetup + self.moduleTeardown
- if hasattr(context, '__path__'):
- names += self.packageSetup + self.packageTeardown
- # If my context has any fixture attribute, I have fixtures
- fixt = False
- for m in names:
- if hasattr(context, m):
- fixt = True
- break
- if ctx_callback is None:
- return fixt
- return ctx_callback(context, fixt)
-
- def setUp(self):
- log.debug("suite %s setUp called, tests: %s", id(self), self._tests)
- if not self:
- # I have no tests
- log.debug("suite %s has no tests", id(self))
- return
- if self.was_setup:
- log.debug("suite %s already set up", id(self))
- return
- context = self.context
- if context is None:
- return
- # before running my own context's setup, I need to
- # ask the factory if my context's contexts' setups have been run
- factory = self.factory
- if factory:
- # get a copy, since we'll be destroying it as we go
- ancestors = factory.context.get(self, [])[:]
- while ancestors:
- ancestor = ancestors.pop()
- log.debug("ancestor %s may need setup", ancestor)
- if ancestor in factory.was_setup:
- continue
- log.debug("ancestor %s does need setup", ancestor)
- self.setupContext(ancestor)
- if not context in factory.was_setup:
- self.setupContext(context)
- else:
- self.setupContext(context)
- self.was_setup = True
- log.debug("completed suite setup")
-
- def setupContext(self, context):
- self.config.plugins.startContext(context)
- log.debug("%s setup context %s", self, context)
- if self.factory:
- if context in self.factory.was_setup:
- return
- # note that I ran the setup for this context, so that I'll run
- # the teardown in my teardown
- self.factory.was_setup[context] = self
- if isclass(context):
- names = self.classSetup
- else:
- names = self.moduleSetup
- if hasattr(context, '__path__'):
- names = self.packageSetup + names
- try_run(context, names)
-
- def shortDescription(self):
- if self.context is None:
- return "test suite"
- return "test suite for %s" % self.context
-
- def tearDown(self):
- log.debug('context teardown')
- if not self.was_setup or self.was_torndown:
- log.debug(
- "No reason to teardown (was_setup? %s was_torndown? %s)"
- % (self.was_setup, self.was_torndown))
- return
- self.was_torndown = True
- context = self.context
- if context is None:
- log.debug("No context to tear down")
- return
-
- # for each ancestor... if the ancestor was setup
- # and I did the setup, I can do teardown
- factory = self.factory
- if factory:
- ancestors = factory.context.get(self, []) + [context]
- for ancestor in ancestors:
- log.debug('ancestor %s may need teardown', ancestor)
- if not ancestor in factory.was_setup:
- log.debug('ancestor %s was not setup', ancestor)
- continue
- if ancestor in factory.was_torndown:
- log.debug('ancestor %s already torn down', ancestor)
- continue
- setup = factory.was_setup[ancestor]
- log.debug("%s setup ancestor %s", setup, ancestor)
- if setup is self:
- self.teardownContext(ancestor)
- else:
- self.teardownContext(context)
-
- def teardownContext(self, context):
- log.debug("%s teardown context %s", self, context)
- if self.factory:
- if context in self.factory.was_torndown:
- return
- self.factory.was_torndown[context] = self
- if isclass(context):
- names = self.classTeardown
- else:
- names = self.moduleTeardown
- if hasattr(context, '__path__'):
- names = self.packageTeardown + names
- try_run(context, names)
- self.config.plugins.stopContext(context)
-
- # FIXME the wrapping has to move to the factory?
- def _get_wrapped_tests(self):
- for test in self._get_tests():
- if isinstance(test, Test) or isinstance(test, unittest.TestSuite):
- yield test
- else:
- yield Test(test,
- config=self.config,
- resultProxy=self.resultProxy)
-
- _tests = property(_get_wrapped_tests, LazySuite._set_tests, None,
- "Access the tests in this suite. Tests are returned "
- "inside of a context wrapper.")
-
-
-class ContextSuiteFactory(object):
- """Factory for ContextSuites. Called with a collection of tests,
- the factory decides on a hierarchy of contexts by introspecting
- the collection or the tests themselves to find the objects
- containing the test objects. It always returns one suite, but that
- suite may consist of a hierarchy of nested suites.
- """
- suiteClass = ContextSuite
- def __init__(self, config=None, suiteClass=None, resultProxy=_def):
- if config is None:
- config = Config()
- self.config = config
- if suiteClass is not None:
- self.suiteClass = suiteClass
- # Using a singleton to represent default instead of None allows
- # passing resultProxy=None to turn proxying off.
- if resultProxy is _def:
- resultProxy = ResultProxyFactory(config=config)
- self.resultProxy = resultProxy
- self.suites = {}
- self.context = {}
- self.was_setup = {}
- self.was_torndown = {}
-
- def __call__(self, tests, **kw):
- """Return ``ContextSuite`` for tests. ``tests`` may either
- be a callable (in which case the resulting ContextSuite will
- have no parent context and be evaluated lazily) or an
- iterable. In that case the tests will wrapped in
- nose.case.Test, be examined and the context of each found and a
- suite of suites returned, organized into a stack with the
- outermost suites belonging to the outermost contexts.
- """
- log.debug("Create suite for %s", tests)
- context = kw.pop('context', getattr(tests, 'context', None))
- log.debug("tests %s context %s", tests, context)
- if context is None:
- tests = self.wrapTests(tests)
- try:
- context = self.findContext(tests)
- except MixedContextError:
- return self.makeSuite(self.mixedSuites(tests), None, **kw)
- return self.makeSuite(tests, context, **kw)
-
- def ancestry(self, context):
- """Return the ancestry of the context (that is, all of the
- packages and modules containing the context), in order of
- descent with the outermost ancestor last.
- This method is a generator.
- """
- log.debug("get ancestry %s", context)
- if context is None:
- return
- # Methods include reference to module they are defined in, we
- # don't want that, instead want the module the class is in now
- # (classes are re-ancestored elsewhere).
- if hasattr(context, 'im_class'):
- context = context.im_class
- elif hasattr(context, '__self__'):
- context = context.__self__.__class__
- if hasattr(context, '__module__'):
- ancestors = context.__module__.split('.')
- elif hasattr(context, '__name__'):
- ancestors = context.__name__.split('.')[:-1]
- else:
- raise TypeError("%s has no ancestors?" % context)
- while ancestors:
- log.debug(" %s ancestors %s", context, ancestors)
- yield resolve_name('.'.join(ancestors))
- ancestors.pop()
-
- def findContext(self, tests):
- if callable(tests) or isinstance(tests, unittest.TestSuite):
- return None
- context = None
- for test in tests:
- # Don't look at suites for contexts, only tests
- ctx = getattr(test, 'context', None)
- if ctx is None:
- continue
- if context is None:
- context = ctx
- elif context != ctx:
- raise MixedContextError(
- "Tests with different contexts in same suite! %s != %s"
- % (context, ctx))
- return context
-
- def makeSuite(self, tests, context, **kw):
- suite = self.suiteClass(
- tests, context=context, config=self.config, factory=self,
- resultProxy=self.resultProxy, **kw)
- if context is not None:
- self.suites.setdefault(context, []).append(suite)
- self.context.setdefault(suite, []).append(context)
- log.debug("suite %s has context %s", suite,
- getattr(context, '__name__', None))
- for ancestor in self.ancestry(context):
- self.suites.setdefault(ancestor, []).append(suite)
- self.context[suite].append(ancestor)
- log.debug("suite %s has ancestor %s", suite, ancestor.__name__)
- return suite
-
- def mixedSuites(self, tests):
- """The complex case where there are tests that don't all share
- the same context. Groups tests into suites with common ancestors,
- according to the following (essentially tail-recursive) procedure:
-
- Starting with the context of the first test, if it is not
- None, look for tests in the remaining tests that share that
- ancestor. If any are found, group into a suite with that
- ancestor as the context, and replace the current suite with
- that suite. Continue this process for each ancestor of the
- first test, until all ancestors have been processed. At this
- point if any tests remain, recurse with those tests as the
- input, returning a list of the common suite (which may be the
- suite or test we started with, if no common tests were found)
- plus the results of recursion.
- """
- if not tests:
- return []
- head = tests.pop(0)
- if not tests:
- return [head] # short circuit when none are left to combine
- suite = head # the common ancestry suite, so far
- tail = tests[:]
- context = getattr(head, 'context', None)
- if context is not None:
- ancestors = [context] + [a for a in self.ancestry(context)]
- for ancestor in ancestors:
- common = [suite] # tests with ancestor in common, so far
- remain = [] # tests that remain to be processed
- for test in tail:
- found_common = False
- test_ctx = getattr(test, 'context', None)
- if test_ctx is None:
- remain.append(test)
- continue
- if test_ctx is ancestor:
- common.append(test)
- continue
- for test_ancestor in self.ancestry(test_ctx):
- if test_ancestor is ancestor:
- common.append(test)
- found_common = True
- break
- if not found_common:
- remain.append(test)
- if common:
- suite = self.makeSuite(common, ancestor)
- tail = self.mixedSuites(remain)
- return [suite] + tail
-
- def wrapTests(self, tests):
- log.debug("wrap %s", tests)
- if callable(tests) or isinstance(tests, unittest.TestSuite):
- log.debug("I won't wrap")
- return tests
- wrapped = []
- for test in tests:
- log.debug("wrapping %s", test)
- if isinstance(test, Test) or isinstance(test, unittest.TestSuite):
- wrapped.append(test)
- elif isinstance(test, ContextList):
- wrapped.append(self.makeSuite(test, context=test.context))
- else:
- wrapped.append(
- Test(test, config=self.config, resultProxy=self.resultProxy)
- )
- return wrapped
-
-
-class ContextList(object):
- """Not quite a suite -- a group of tests in a context. This is used
- to hint the ContextSuiteFactory about what context the tests
- belong to, in cases where it may be ambiguous or missing.
- """
- def __init__(self, tests, context=None):
- self.tests = tests
- self.context = context
-
- def __iter__(self):
- return iter(self.tests)
-
-
-class FinalizingSuiteWrapper(unittest.TestSuite):
- """Wraps suite and calls final function after suite has
- executed. Used to call final functions in cases (like running in
- the standard test runner) where test running is not under nose's
- control.
- """
- def __init__(self, suite, finalize):
- super(FinalizingSuiteWrapper, self).__init__()
- self.suite = suite
- self.finalize = finalize
-
- def __call__(self, *arg, **kw):
- return self.run(*arg, **kw)
-
- # 2.7 compat
- def __iter__(self):
- return iter(self.suite)
-
- def run(self, *arg, **kw):
- try:
- return self.suite(*arg, **kw)
- finally:
- self.finalize(*arg, **kw)
-
-
-# backwards compat -- sort of
-class TestDir:
- def __init__(*arg, **kw):
- raise NotImplementedError(
- "TestDir is not usable with nose 0.10. The class is present "
- "in nose.suite for backwards compatibility purposes but it "
- "may not be used.")
-
-
-class TestModule:
- def __init__(*arg, **kw):
- raise NotImplementedError(
- "TestModule is not usable with nose 0.10. The class is present "
- "in nose.suite for backwards compatibility purposes but it "
- "may not be used.")
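The ancestry walk implemented by ContextSuiteFactory.ancestry above can be pictured with a small standalone sketch; it is illustrative only and not part of nose's API, and importlib.import_module stands in for nose's resolve_name.

    import importlib

    def ancestry(context):
        # A class carries the module it was defined in; a module drops its own
        # name and starts from its parent package.
        if hasattr(context, '__module__'):
            ancestors = context.__module__.split('.')
        elif hasattr(context, '__name__'):
            ancestors = context.__name__.split('.')[:-1]
        else:
            raise TypeError("%s has no ancestors?" % context)
        while ancestors:
            yield importlib.import_module('.'.join(ancestors))
            ancestors.pop()

    # For a class defined in package.sub.test_mod this yields the modules
    # package.sub.test_mod, package.sub and package, most specific first.
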
diff --git a/lib/spack/external/nose/tools/__init__.py b/lib/spack/external/nose/tools/__init__.py
deleted file mode 100644
index 74dab16a74..0000000000
--- a/lib/spack/external/nose/tools/__init__.py
+++ /dev/null
@@ -1,15 +0,0 @@
-"""
-Tools for testing
------------------
-
-nose.tools provides a few convenience functions to make writing tests
-easier. You don't have to use them; nothing in the rest of nose depends
-on any of these methods.
-
-"""
-from nose.tools.nontrivial import *
-from nose.tools.nontrivial import __all__ as nontrivial_all
-from nose.tools.trivial import *
-from nose.tools.trivial import __all__ as trivial_all
-
-__all__ = trivial_all + nontrivial_all
diff --git a/lib/spack/external/nose/tools/nontrivial.py b/lib/spack/external/nose/tools/nontrivial.py
deleted file mode 100644
index 283973245b..0000000000
--- a/lib/spack/external/nose/tools/nontrivial.py
+++ /dev/null
@@ -1,151 +0,0 @@
-"""Tools not exempt from being descended into in tracebacks"""
-
-import time
-
-
-__all__ = ['make_decorator', 'raises', 'set_trace', 'timed', 'with_setup',
- 'TimeExpired', 'istest', 'nottest']
-
-
-class TimeExpired(AssertionError):
- pass
-
-
-def make_decorator(func):
- """
- Wraps a test decorator so as to properly replicate metadata
- of the decorated function, including nose's additional stuff
- (namely, setup and teardown).
- """
- def decorate(newfunc):
- if hasattr(func, 'compat_func_name'):
- name = func.compat_func_name
- else:
- name = func.__name__
- newfunc.__dict__ = func.__dict__
- newfunc.__doc__ = func.__doc__
- newfunc.__module__ = func.__module__
- if not hasattr(newfunc, 'compat_co_firstlineno'):
- newfunc.compat_co_firstlineno = func.func_code.co_firstlineno
- try:
- newfunc.__name__ = name
- except TypeError:
- # can't set func name in 2.3
- newfunc.compat_func_name = name
- return newfunc
- return decorate
-
-
-def raises(*exceptions):
- """Test must raise one of expected exceptions to pass.
-
- Example use::
-
- @raises(TypeError, ValueError)
- def test_raises_type_error():
- raise TypeError("This test passes")
-
- @raises(Exception)
- def test_that_fails_by_passing():
- pass
-
- If you want to test many assertions about exceptions in a single test,
- you may want to use `assert_raises` instead.
- """
- valid = ' or '.join([e.__name__ for e in exceptions])
- def decorate(func):
- name = func.__name__
- def newfunc(*arg, **kw):
- try:
- func(*arg, **kw)
- except exceptions:
- pass
- except:
- raise
- else:
- message = "%s() did not raise %s" % (name, valid)
- raise AssertionError(message)
- newfunc = make_decorator(func)(newfunc)
- return newfunc
- return decorate
-
-
-def set_trace():
- """Call pdb.set_trace in the calling frame, first restoring
- sys.stdout to the real output stream. Note that sys.stdout is NOT
- reset to whatever it was before the call once pdb is done!
- """
- import pdb
- import sys
- stdout = sys.stdout
- sys.stdout = sys.__stdout__
- pdb.Pdb().set_trace(sys._getframe().f_back)
-
-
-def timed(limit):
- """Test must finish within specified time limit to pass.
-
- Example use::
-
- @timed(.1)
- def test_that_fails():
- time.sleep(.2)
- """
- def decorate(func):
- def newfunc(*arg, **kw):
- start = time.time()
- result = func(*arg, **kw)
- end = time.time()
- if end - start > limit:
- raise TimeExpired("Time limit (%s) exceeded" % limit)
- return result
- newfunc = make_decorator(func)(newfunc)
- return newfunc
- return decorate
-
-
-def with_setup(setup=None, teardown=None):
- """Decorator to add setup and/or teardown methods to a test function::
-
- @with_setup(setup, teardown)
- def test_something():
- " ... "
-
- Note that `with_setup` is useful *only* for test functions, not for test
- methods or inside of TestCase subclasses.
- """
- def decorate(func, setup=setup, teardown=teardown):
- if setup:
- if hasattr(func, 'setup'):
- _old_s = func.setup
- def _s():
- setup()
- _old_s()
- func.setup = _s
- else:
- func.setup = setup
- if teardown:
- if hasattr(func, 'teardown'):
- _old_t = func.teardown
- def _t():
- _old_t()
- teardown()
- func.teardown = _t
- else:
- func.teardown = teardown
- return func
- return decorate
-
-
-def istest(func):
- """Decorator to mark a function or method as a test
- """
- func.__test__ = True
- return func
-
-
-def nottest(func):
- """Decorator to mark a function or method as *not* a test
- """
- func.__test__ = False
- return func
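A short usage sketch of the helpers deleted above; the test functions and the state dict are hypothetical examples.

    from nose.tools import with_setup, timed, raises

    state = {}

    def setup_state():
        state['ready'] = True

    def teardown_state():
        state.clear()

    @with_setup(setup_state, teardown_state)
    def test_uses_fixture():
        assert state['ready']

    @timed(0.5)            # fails with TimeExpired if it runs longer than 0.5s
    def test_finishes_quickly():
        pass

    @raises(KeyError)      # passes only if the body raises KeyError
    def test_missing_key():
        {}['nope']
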
diff --git a/lib/spack/external/nose/tools/trivial.py b/lib/spack/external/nose/tools/trivial.py
deleted file mode 100644
index cf83efeda5..0000000000
--- a/lib/spack/external/nose/tools/trivial.py
+++ /dev/null
@@ -1,54 +0,0 @@
-"""Tools so trivial that tracebacks should not descend into them
-
-We define the ``__unittest`` symbol in their module namespace so unittest will
-skip them when printing tracebacks, just as it does for their corresponding
-methods in ``unittest`` proper.
-
-"""
-import re
-import unittest
-
-
-__all__ = ['ok_', 'eq_']
-
-# Use the same flag as unittest itself to prevent descent into these functions:
-__unittest = 1
-
-
-def ok_(expr, msg=None):
- """Shorthand for assert. Saves 3 whole characters!
- """
- if not expr:
- raise AssertionError(msg)
-
-
-def eq_(a, b, msg=None):
-    """Shorthand for 'assert a == b, "%r != %r" % (a, b)'
- """
- if not a == b:
- raise AssertionError(msg or "%r != %r" % (a, b))
-
-
-#
-# Expose assert* from unittest.TestCase
-# - give them pep8 style names
-#
-caps = re.compile('([A-Z])')
-
-def pep8(name):
- return caps.sub(lambda m: '_' + m.groups()[0].lower(), name)
-
-class Dummy(unittest.TestCase):
- def nop():
- pass
-_t = Dummy('nop')
-
-for at in [ at for at in dir(_t)
- if at.startswith('assert') and not '_' in at ]:
- pepd = pep8(at)
- vars()[pepd] = getattr(_t, at)
- __all__.append(pepd)
-
-del Dummy
-del _t
-del pep8
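The loop above generates pep8-style aliases for the TestCase assert methods (assertEqual becomes assert_equal, assertTrue becomes assert_true, and so on); a brief sketch of them alongside ok_ and eq_:

    from nose.tools import ok_, eq_, assert_equal, assert_true

    def test_shorthand():
        ok_(1 + 1 == 2, "arithmetic is broken")
        eq_('spam'.upper(), 'SPAM')
        assert_equal(len('abc'), 3)
        assert_true(isinstance([], list))
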
diff --git a/lib/spack/external/nose/twistedtools.py b/lib/spack/external/nose/twistedtools.py
deleted file mode 100644
index 8d9c6ffe9b..0000000000
--- a/lib/spack/external/nose/twistedtools.py
+++ /dev/null
@@ -1,173 +0,0 @@
-"""
-Twisted integration
--------------------
-
-This module provides a very simple way to integrate your tests with the
-Twisted_ event loop.
-
-You must import this module *before* importing anything from Twisted itself!
-
-Example::
-
- from nose.twistedtools import reactor, deferred
-
- @deferred()
- def test_resolve():
- return reactor.resolve("www.python.org")
-
-Or, more realistically::
-
- @deferred(timeout=5.0)
- def test_resolve():
- d = reactor.resolve("www.python.org")
- def check_ip(ip):
- assert ip == "67.15.36.43"
- d.addCallback(check_ip)
- return d
-
-.. _Twisted: http://twistedmatrix.com/trac/
-"""
-
-import sys
-from Queue import Queue, Empty
-from nose.tools import make_decorator, TimeExpired
-
-__all__ = [
- 'threaded_reactor', 'reactor', 'deferred', 'TimeExpired',
- 'stop_reactor'
-]
-
-_twisted_thread = None
-
-def threaded_reactor():
- """
- Start the Twisted reactor in a separate thread, if not already done.
- Returns the reactor.
- The thread will automatically be destroyed when all the tests are done.
- """
- global _twisted_thread
- try:
- from twisted.internet import reactor
- except ImportError:
- return None, None
- if not _twisted_thread:
- from twisted.python import threadable
- from threading import Thread
- _twisted_thread = Thread(target=lambda: reactor.run( \
- installSignalHandlers=False))
- _twisted_thread.setDaemon(True)
- _twisted_thread.start()
- return reactor, _twisted_thread
-
-# Export global reactor variable, as Twisted does
-reactor, reactor_thread = threaded_reactor()
-
-
-def stop_reactor():
- """Stop the reactor and join the reactor thread until it stops.
- Call this function in teardown at the module or package level to
- reset the twisted system after your tests. You *must* do this if
- you mix tests using these tools and tests using twisted.trial.
- """
- global _twisted_thread
-
- def stop_reactor():
-        '''Helper for calling stop from within the thread.'''
- reactor.stop()
-
- reactor.callFromThread(stop_reactor)
- reactor_thread.join()
- for p in reactor.getDelayedCalls():
- if p.active():
- p.cancel()
- _twisted_thread = None
-
-
-def deferred(timeout=None):
- """
- By wrapping a test function with this decorator, you can return a
- twisted Deferred and the test will wait for the deferred to be triggered.
- The whole test function will run inside the Twisted event loop.
-
- The optional timeout parameter specifies the maximum duration of the test.
- The difference with timed() is that timed() will still wait for the test
- to end, while deferred() will stop the test when its timeout has expired.
-    The latter is more desirable when dealing with network tests, because
- the result may actually never arrive.
-
- If the callback is triggered, the test has passed.
- If the errback is triggered or the timeout expires, the test has failed.
-
- Example::
-
- @deferred(timeout=5.0)
- def test_resolve():
- return reactor.resolve("www.python.org")
-
- Attention! If you combine this decorator with other decorators (like
- "raises"), deferred() must be called *first*!
-
- In other words, this is good::
-
- @raises(DNSLookupError)
- @deferred()
- def test_error():
- return reactor.resolve("xxxjhjhj.biz")
-
- and this is bad::
-
- @deferred()
- @raises(DNSLookupError)
- def test_error():
- return reactor.resolve("xxxjhjhj.biz")
- """
- reactor, reactor_thread = threaded_reactor()
- if reactor is None:
- raise ImportError("twisted is not available or could not be imported")
- # Check for common syntax mistake
- # (otherwise, tests can be silently ignored
- # if one writes "@deferred" instead of "@deferred()")
- try:
- timeout is None or timeout + 0
- except TypeError:
- raise TypeError("'timeout' argument must be a number or None")
-
- def decorate(func):
- def wrapper(*args, **kargs):
- q = Queue()
- def callback(value):
- q.put(None)
- def errback(failure):
- # Retrieve and save full exception info
- try:
- failure.raiseException()
- except:
- q.put(sys.exc_info())
- def g():
- try:
- d = func(*args, **kargs)
- try:
- d.addCallbacks(callback, errback)
- # Check for a common mistake and display a nice error
- # message
- except AttributeError:
- raise TypeError("you must return a twisted Deferred "
- "from your test case!")
- # Catch exceptions raised in the test body (from the
- # Twisted thread)
- except:
- q.put(sys.exc_info())
- reactor.callFromThread(g)
- try:
- error = q.get(timeout=timeout)
- except Empty:
- raise TimeExpired("timeout expired before end of test (%f s.)"
- % timeout)
- # Re-raise all exceptions
- if error is not None:
- exc_type, exc_value, tb = error
- raise exc_type, exc_value, tb
- wrapper = make_decorator(func)(wrapper)
- return wrapper
- return decorate
-
diff --git a/lib/spack/external/nose/usage.txt b/lib/spack/external/nose/usage.txt
deleted file mode 100644
index bc96894ab7..0000000000
--- a/lib/spack/external/nose/usage.txt
+++ /dev/null
@@ -1,115 +0,0 @@
-nose collects tests automatically from python source files,
-directories and packages found in its working directory (which
-defaults to the current working directory). Any python source file,
-directory or package that matches the testMatch regular expression
-(by default: `(?:^|[\b_\.-])[Tt]est`) will be collected as a test (or
-source for collection of tests). In addition, all other packages
-found in the working directory will be examined for python source files
-or directories that match testMatch. Package discovery descends all
-the way down the tree, so package.tests and package.sub.tests and
-package.sub.sub2.tests will all be collected.
-
-Within a test directory or package, any python source file matching
-testMatch will be examined for test cases. Within a test module,
-functions and classes whose names match testMatch and TestCase
-subclasses with any name will be loaded and executed as tests. Tests
-may use the assert keyword or raise AssertionErrors to indicate test
-failure. TestCase subclasses may do the same or use the various
-TestCase methods available.
-
-**It is important to note that the default behavior of nose is to
-not include tests from files which are executable.** To include
-tests from such files, remove their executable bit or use
-the --exe flag (see 'Options' section below).
-
-Selecting Tests
----------------
-
-To specify which tests to run, pass test names on the command line:
-
- %prog only_test_this.py
-
-Test names specified may be file or module names, and may optionally
-indicate the test case to run by separating the module or file name
-from the test case name with a colon. Filenames may be relative or
-absolute. Examples:
-
- %prog test.module
- %prog another.test:TestCase.test_method
- %prog a.test:TestCase
- %prog /path/to/test/file.py:test_function
-
-You may also change the working directory where nose looks for tests
-by using the -w switch:
-
- %prog -w /path/to/tests
-
-Note, however, that support for multiple -w arguments is now deprecated
-and will be removed in a future release. As of nose 0.10, you can get
-the same behavior by specifying the target directories *without*
-the -w switch:
-
- %prog /path/to/tests /another/path/to/tests
-
-Further customization of test selection and loading is possible
-through the use of plugins.
-
-Test result output is identical to that of unittest, except for
-the additional features (error classes, and plugin-supplied
-features such as output capture and assert introspection) detailed
-in the options below.
-
-Configuration
--------------
-
-In addition to passing command-line options, you may also put
-configuration options in your project's *setup.cfg* file, or a .noserc
-or nose.cfg file in your home directory. In any of these standard
-ini-style config files, you put your nosetests configuration in a
-``[nosetests]`` section. Options are the same as on the command line,
-with the -- prefix removed. For options that are simple switches, you
-must supply a value:
-
- [nosetests]
- verbosity=3
- with-doctest=1
-
-All configuration files that are found will be loaded and their
-options combined. You can override the standard config file loading
-with the ``-c`` option.
-
-Using Plugins
--------------
-
-There are numerous nose plugins available via easy_install and
-elsewhere. To use a plugin, just install it. The plugin will add
-command line options to nosetests. To verify that the plugin is installed,
-run:
-
- nosetests --plugins
-
-You can add -v or -vv to that command to show more information
-about each plugin.
-
-If you are running nose.main() or nose.run() from a script, you
-can specify a list of plugins to use by passing a list of plugins
-with the plugins keyword argument.
-
-0.9 plugins
------------
-
-nose 1.0 can use SOME plugins that were written for nose 0.9. The
-default plugin manager inserts a compatibility wrapper around 0.9
-plugins that adapts the changed plugin api calls. However, plugins
-that access nose internals are likely to fail, especially if they
-attempt to access test case or test suite classes. For example,
-plugins that try to determine if a test passed to startTest is an
-individual test or a suite will fail, partly because suites are no
-longer passed to startTest and partly because it's likely that the
-plugin is trying to find out if the test is an instance of a class
-that no longer exists.
-
-0.10 and 0.11 plugins
----------------------
-
-All plugins written for nose 0.10 and 0.11 should work with nose 1.0.
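As described under "Using Plugins" above, a script driving nose.main() can hand over an explicit plugin list; the plugin class below is a hypothetical do-nothing stub, and the plugins keyword follows the wording of this usage text.

    import nose
    from nose.plugins import Plugin

    class NullPlugin(Plugin):
        """Hypothetical plugin used only for illustration."""
        name = 'null'

    if __name__ == '__main__':
        nose.main(plugins=[NullPlugin()])
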
diff --git a/lib/spack/external/nose/util.py b/lib/spack/external/nose/util.py
deleted file mode 100644
index bfe16589ea..0000000000
--- a/lib/spack/external/nose/util.py
+++ /dev/null
@@ -1,668 +0,0 @@
-"""Utility functions and classes used by nose internally.
-"""
-import inspect
-import itertools
-import logging
-import stat
-import os
-import re
-import sys
-import types
-import unittest
-from nose.pyversion import ClassType, TypeType, isgenerator, ismethod
-
-
-log = logging.getLogger('nose')
-
-ident_re = re.compile(r'^[A-Za-z_][A-Za-z0-9_.]*$')
-class_types = (ClassType, TypeType)
-skip_pattern = r"(?:\.svn)|(?:[^.]+\.py[co])|(?:.*~)|(?:.*\$py\.class)|(?:__pycache__)"
-
-try:
- set()
- set = set # make from nose.util import set happy
-except NameError:
- try:
- from sets import Set as set
- except ImportError:
- pass
-
-
-def ls_tree(dir_path="",
- skip_pattern=skip_pattern,
- indent="|-- ", branch_indent="| ",
- last_indent="`-- ", last_branch_indent=" "):
- # TODO: empty directories look like non-directory files
- return "\n".join(_ls_tree_lines(dir_path, skip_pattern,
- indent, branch_indent,
- last_indent, last_branch_indent))
-
-
-def _ls_tree_lines(dir_path, skip_pattern,
- indent, branch_indent, last_indent, last_branch_indent):
- if dir_path == "":
- dir_path = os.getcwd()
-
- lines = []
-
- names = os.listdir(dir_path)
- names.sort()
- dirs, nondirs = [], []
- for name in names:
- if re.match(skip_pattern, name):
- continue
- if os.path.isdir(os.path.join(dir_path, name)):
- dirs.append(name)
- else:
- nondirs.append(name)
-
- # list non-directories first
- entries = list(itertools.chain([(name, False) for name in nondirs],
- [(name, True) for name in dirs]))
- def ls_entry(name, is_dir, ind, branch_ind):
- if not is_dir:
- yield ind + name
- else:
- path = os.path.join(dir_path, name)
- if not os.path.islink(path):
- yield ind + name
- subtree = _ls_tree_lines(path, skip_pattern,
- indent, branch_indent,
- last_indent, last_branch_indent)
- for x in subtree:
- yield branch_ind + x
- for name, is_dir in entries[:-1]:
- for line in ls_entry(name, is_dir, indent, branch_indent):
- yield line
- if entries:
- name, is_dir = entries[-1]
- for line in ls_entry(name, is_dir, last_indent, last_branch_indent):
- yield line
-
-
-def absdir(path):
- """Return absolute, normalized path to directory, if it exists; None
- otherwise.
- """
- if not os.path.isabs(path):
- path = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(),
- path)))
- if path is None or not os.path.isdir(path):
- return None
- return path
-
-
-def absfile(path, where=None):
- """Return absolute, normalized path to file (optionally in directory
- where), or None if the file can't be found either in where or the current
- working directory.
- """
- orig = path
- if where is None:
- where = os.getcwd()
- if isinstance(where, list) or isinstance(where, tuple):
- for maybe_path in where:
- maybe_abs = absfile(path, maybe_path)
- if maybe_abs is not None:
- return maybe_abs
- return None
- if not os.path.isabs(path):
- path = os.path.normpath(os.path.abspath(os.path.join(where, path)))
- if path is None or not os.path.exists(path):
- if where != os.getcwd():
- # try the cwd instead
- path = os.path.normpath(os.path.abspath(os.path.join(os.getcwd(),
- orig)))
- if path is None or not os.path.exists(path):
- return None
- if os.path.isdir(path):
-        # might want an __init__.py from package
- init = os.path.join(path,'__init__.py')
- if os.path.isfile(init):
- return init
- elif os.path.isfile(path):
- return path
- return None
-
-
-def anyp(predicate, iterable):
- for item in iterable:
- if predicate(item):
- return True
- return False
-
-
-def file_like(name):
- """A name is file-like if it is a path that exists, or it has a
- directory part, or it ends in .py, or it isn't a legal python
- identifier.
- """
- return (os.path.exists(name)
- or os.path.dirname(name)
- or name.endswith('.py')
- or not ident_re.match(os.path.splitext(name)[0]))
-
-
-def func_lineno(func):
- """Get the line number of a function. First looks for
-    compat_co_firstlineno, then func_code.co_firstlineno.
- """
- try:
- return func.compat_co_firstlineno
- except AttributeError:
- try:
- return func.func_code.co_firstlineno
- except AttributeError:
- return -1
-
-
-def isclass(obj):
- """Is obj a class? Inspect's isclass is too liberal and returns True
- for objects that can't be subclasses of anything.
- """
- obj_type = type(obj)
- return obj_type in class_types or issubclass(obj_type, type)
-
-
-# backwards compat (issue #64)
-is_generator = isgenerator
-
-
-def ispackage(path):
- """
- Is this path a package directory?
-
- >>> ispackage('nose')
- True
- >>> ispackage('unit_tests')
- False
- >>> ispackage('nose/plugins')
- True
- >>> ispackage('nose/loader.py')
- False
- """
- if os.path.isdir(path):
- # at least the end of the path must be a legal python identifier
- # and __init__.py[co] must exist
- end = os.path.basename(path)
- if ident_re.match(end):
- for init in ('__init__.py', '__init__.pyc', '__init__.pyo'):
- if os.path.isfile(os.path.join(path, init)):
- return True
- if sys.platform.startswith('java') and \
- os.path.isfile(os.path.join(path, '__init__$py.class')):
- return True
- return False
-
-
-def isproperty(obj):
- """
- Is this a property?
-
- >>> class Foo:
- ... def got(self):
- ... return 2
- ... def get(self):
- ... return 1
- ... get = property(get)
-
- >>> isproperty(Foo.got)
- False
- >>> isproperty(Foo.get)
- True
- """
- return type(obj) == property
-
-
-def getfilename(package, relativeTo=None):
- """Find the python source file for a package, relative to a
- particular directory (defaults to current working directory if not
- given).
- """
- if relativeTo is None:
- relativeTo = os.getcwd()
- path = os.path.join(relativeTo, os.sep.join(package.split('.')))
- if os.path.exists(path + '/__init__.py'):
- return path
- filename = path + '.py'
- if os.path.exists(filename):
- return filename
- return None
-
-
-def getpackage(filename):
- """
- Find the full dotted package name for a given python source file
- name. Returns None if the file is not a python source file.
-
- >>> getpackage('foo.py')
- 'foo'
- >>> getpackage('biff/baf.py')
- 'baf'
- >>> getpackage('nose/util.py')
- 'nose.util'
-
- Works for directories too.
-
- >>> getpackage('nose')
- 'nose'
- >>> getpackage('nose/plugins')
- 'nose.plugins'
-
- And __init__ files stuck onto directories
-
- >>> getpackage('nose/plugins/__init__.py')
- 'nose.plugins'
-
- Absolute paths also work.
-
- >>> path = os.path.abspath(os.path.join('nose', 'plugins'))
- >>> getpackage(path)
- 'nose.plugins'
- """
- src_file = src(filename)
- if (os.path.isdir(src_file) or not src_file.endswith('.py')) and not ispackage(src_file):
- return None
- base, ext = os.path.splitext(os.path.basename(src_file))
- if base == '__init__':
- mod_parts = []
- else:
- mod_parts = [base]
- path, part = os.path.split(os.path.split(src_file)[0])
- while part:
- if ispackage(os.path.join(path, part)):
- mod_parts.append(part)
- else:
- break
- path, part = os.path.split(path)
- mod_parts.reverse()
- return '.'.join(mod_parts)
-
-
-def ln(label):
- """Draw a 70-char-wide divider, with label in the middle.
-
- >>> ln('hello there')
- '---------------------------- hello there -----------------------------'
- """
- label_len = len(label) + 2
- chunk = (70 - label_len) // 2
- out = '%s %s %s' % ('-' * chunk, label, '-' * chunk)
- pad = 70 - len(out)
- if pad > 0:
- out = out + ('-' * pad)
- return out
-
-
-def resolve_name(name, module=None):
- """Resolve a dotted name to a module and its parts. This is stolen
- wholesale from unittest.TestLoader.loadTestByName.
-
- >>> resolve_name('nose.util') #doctest: +ELLIPSIS
- <module 'nose.util' from...>
- >>> resolve_name('nose.util.resolve_name') #doctest: +ELLIPSIS
- <function resolve_name at...>
- """
- parts = name.split('.')
- parts_copy = parts[:]
- if module is None:
- while parts_copy:
- try:
- log.debug("__import__ %s", name)
- module = __import__('.'.join(parts_copy))
- break
- except ImportError:
- del parts_copy[-1]
- if not parts_copy:
- raise
- parts = parts[1:]
- obj = module
- log.debug("resolve: %s, %s, %s, %s", parts, name, obj, module)
- for part in parts:
- obj = getattr(obj, part)
- return obj
-
-
-def split_test_name(test):
- """Split a test name into a 3-tuple containing file, module, and callable
- names, any of which (but not all) may be blank.
-
- Test names are in the form:
-
- file_or_module:callable
-
- Either side of the : may be dotted. To change the splitting behavior, you
- can alter nose.util.split_test_re.
- """
- norm = os.path.normpath
- file_or_mod = test
- fn = None
- if not ':' in test:
- # only a file or mod part
- if file_like(test):
- return (norm(test), None, None)
- else:
- return (None, test, None)
-
- # could be path|mod:callable, or a : in the file path someplace
- head, tail = os.path.split(test)
- if not head:
- # this is a case like 'foo:bar' -- generally a module
- # name followed by a callable, but also may be a windows
- # drive letter followed by a path
- try:
- file_or_mod, fn = test.split(':')
- if file_like(fn):
- # must be a funny path
- file_or_mod, fn = test, None
- except ValueError:
- # more than one : in the test
- # this is a case like c:\some\path.py:a_test
- parts = test.split(':')
- if len(parts[0]) == 1:
- file_or_mod, fn = ':'.join(parts[:-1]), parts[-1]
- else:
- # nonsense like foo:bar:baz
- raise ValueError("Test name '%s' could not be parsed. Please "
- "format test names as path:callable or "
- "module:callable." % (test,))
- elif not tail:
- # this is a case like 'foo:bar/'
- # : must be part of the file path, so ignore it
- file_or_mod = test
- else:
- if ':' in tail:
- file_part, fn = tail.split(':')
- else:
- file_part = tail
- file_or_mod = os.sep.join([head, file_part])
- if file_or_mod:
- if file_like(file_or_mod):
- return (norm(file_or_mod), None, fn)
- else:
- return (None, file_or_mod, fn)
- else:
- return (None, None, fn)
-split_test_name.__test__ = False # do not collect
-
-
-def test_address(test):
- """Find the test address for a test, which may be a module, filename,
- class, method or function.
- """
- if hasattr(test, "address"):
- return test.address()
-    # type-based polymorphism sucks in general, but I believe it is
- # appropriate here
- t = type(test)
- file = module = call = None
- if t == types.ModuleType:
- file = getattr(test, '__file__', None)
- module = getattr(test, '__name__', None)
- return (src(file), module, call)
- if t == types.FunctionType or issubclass(t, type) or t == types.ClassType:
- module = getattr(test, '__module__', None)
- if module is not None:
- m = sys.modules[module]
- file = getattr(m, '__file__', None)
- if file is not None:
- file = os.path.abspath(file)
- call = getattr(test, '__name__', None)
- return (src(file), module, call)
- if t == types.MethodType:
- cls_adr = test_address(test.im_class)
- return (src(cls_adr[0]), cls_adr[1],
- "%s.%s" % (cls_adr[2], test.__name__))
- # handle unittest.TestCase instances
- if isinstance(test, unittest.TestCase):
- if (hasattr(test, '_FunctionTestCase__testFunc') # pre 2.7
- or hasattr(test, '_testFunc')): # 2.7
- # unittest FunctionTestCase
- try:
- return test_address(test._FunctionTestCase__testFunc)
- except AttributeError:
- return test_address(test._testFunc)
- # regular unittest.TestCase
- cls_adr = test_address(test.__class__)
- # 2.5 compat: __testMethodName changed to _testMethodName
- try:
- method_name = test._TestCase__testMethodName
- except AttributeError:
- method_name = test._testMethodName
- return (src(cls_adr[0]), cls_adr[1],
- "%s.%s" % (cls_adr[2], method_name))
- if (hasattr(test, '__class__') and
- test.__class__.__module__ not in ('__builtin__', 'builtins')):
- return test_address(test.__class__)
- raise TypeError("I don't know what %s is (%s)" % (test, t))
-test_address.__test__ = False # do not collect
-
-
-def try_run(obj, names):
- """Given a list of possible method names, try to run them with the
- provided object. Keep going until something works. Used to run
- setup/teardown methods for module, package, and function tests.
- """
- for name in names:
- func = getattr(obj, name, None)
- if func is not None:
- if type(obj) == types.ModuleType:
- # py.test compatibility
- if isinstance(func, types.FunctionType):
- args, varargs, varkw, defaults = \
- inspect.getargspec(func)
- else:
- # Not a function. If it's callable, call it anyway
- if hasattr(func, '__call__') and not inspect.ismethod(func):
- func = func.__call__
- try:
- args, varargs, varkw, defaults = \
- inspect.getargspec(func)
- args.pop(0) # pop the self off
- except TypeError:
- raise TypeError("Attribute %s of %r is not a python "
- "function. Only functions or callables"
- " may be used as fixtures." %
- (name, obj))
- if len(args):
- log.debug("call fixture %s.%s(%s)", obj, name, obj)
- return func(obj)
- log.debug("call fixture %s.%s", obj, name)
- return func()
-
-
-def src(filename):
- """Find the python source file for a .pyc, .pyo or $py.class file on
- jython. Returns the filename provided if it is not a python source
- file.
- """
- if filename is None:
- return filename
- if sys.platform.startswith('java') and filename.endswith('$py.class'):
- return '.'.join((filename[:-9], 'py'))
- base, ext = os.path.splitext(filename)
- if ext in ('.pyc', '.pyo', '.py'):
- return '.'.join((base, 'py'))
- return filename
-
-
-def regex_last_key(regex):
- """Sort key function factory that puts items that match a
- regular expression last.
-
- >>> from nose.config import Config
- >>> from nose.pyversion import sort_list
- >>> c = Config()
- >>> regex = c.testMatch
- >>> entries = ['.', '..', 'a_test', 'src', 'lib', 'test', 'foo.py']
- >>> sort_list(entries, regex_last_key(regex))
- >>> entries
- ['.', '..', 'foo.py', 'lib', 'src', 'a_test', 'test']
- """
- def k(obj):
- if regex.search(obj):
- return (1, obj)
- return (0, obj)
- return k
-
-
-def tolist(val):
- """Convert a value that may be a list or a (possibly comma-separated)
- string into a list. The exception: None is returned as None, not [None].
-
- >>> tolist(["one", "two"])
- ['one', 'two']
- >>> tolist("hello")
- ['hello']
- >>> tolist("separate,values, with, commas, spaces , are ,ok")
- ['separate', 'values', 'with', 'commas', 'spaces', 'are', 'ok']
- """
- if val is None:
- return None
- try:
- # might already be a list
- val.extend([])
- return val
- except AttributeError:
- pass
- # might be a string
- try:
- return re.split(r'\s*,\s*', val)
- except TypeError:
- # who knows...
- return list(val)
-
-
-class odict(dict):
- """Simple ordered dict implementation, based on:
-
- http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/107747
- """
- def __init__(self, *arg, **kw):
- self._keys = []
- super(odict, self).__init__(*arg, **kw)
-
- def __delitem__(self, key):
- super(odict, self).__delitem__(key)
- self._keys.remove(key)
-
- def __setitem__(self, key, item):
- super(odict, self).__setitem__(key, item)
- if key not in self._keys:
- self._keys.append(key)
-
- def __str__(self):
- return "{%s}" % ', '.join(["%r: %r" % (k, v) for k, v in self.items()])
-
- def clear(self):
- super(odict, self).clear()
- self._keys = []
-
- def copy(self):
- d = super(odict, self).copy()
- d._keys = self._keys[:]
- return d
-
- def items(self):
- return zip(self._keys, self.values())
-
- def keys(self):
- return self._keys[:]
-
- def setdefault(self, key, failobj=None):
- item = super(odict, self).setdefault(key, failobj)
- if key not in self._keys:
- self._keys.append(key)
- return item
-
- def update(self, dict):
- super(odict, self).update(dict)
- for key in dict.keys():
- if key not in self._keys:
- self._keys.append(key)
-
- def values(self):
- return map(self.get, self._keys)
-
-
-def transplant_func(func, module):
- """
- Make a function imported from module A appear as if it is located
- in module B.
-
- >>> from pprint import pprint
- >>> pprint.__module__
- 'pprint'
- >>> pp = transplant_func(pprint, __name__)
- >>> pp.__module__
- 'nose.util'
-
- The original function is not modified.
-
- >>> pprint.__module__
- 'pprint'
-
- Calling the transplanted function calls the original.
-
- >>> pp([1, 2])
- [1, 2]
- >>> pprint([1,2])
- [1, 2]
-
- """
- from nose.tools import make_decorator
- if isgenerator(func):
- def newfunc(*arg, **kw):
- for v in func(*arg, **kw):
- yield v
- else:
- def newfunc(*arg, **kw):
- return func(*arg, **kw)
-
- newfunc = make_decorator(func)(newfunc)
- newfunc.__module__ = module
- return newfunc
-
-
-def transplant_class(cls, module):
- """
- Make a class appear to reside in `module`, rather than the module in which
- it is actually defined.
-
- >>> from nose.failure import Failure
- >>> Failure.__module__
- 'nose.failure'
- >>> Nf = transplant_class(Failure, __name__)
- >>> Nf.__module__
- 'nose.util'
- >>> Nf.__name__
- 'Failure'
-
- """
- class C(cls):
- pass
- C.__module__ = module
- C.__name__ = cls.__name__
- return C
-
-
-def safe_str(val, encoding='utf-8'):
- try:
- return str(val)
- except UnicodeEncodeError:
- if isinstance(val, Exception):
- return ' '.join([safe_str(arg, encoding)
- for arg in val])
- return unicode(val).encode(encoding)
-
-
-def is_executable(file):
- if not os.path.exists(file):
- return False
- st = os.stat(file)
- return bool(st.st_mode & (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH))
-
-
-if __name__ == '__main__':
- import doctest
- doctest.testmod()
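Illustrative calls to a few of the helpers above, with results following their docstrings; the test-name string is a hypothetical path.

    from nose.util import split_test_name, tolist, ln

    split_test_name('pkg/test_mod.py:TestCase.test_it')
    # -> ('pkg/test_mod.py', None, 'TestCase.test_it')
    tolist('separate, values , with, commas')
    # -> ['separate', 'values', 'with', 'commas']
    ln('results')
    # -> a 70-character divider with 'results' centred in it
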
diff --git a/lib/spack/external/py/AUTHORS b/lib/spack/external/py/AUTHORS
new file mode 100644
index 0000000000..8c0cf9b71b
--- /dev/null
+++ b/lib/spack/external/py/AUTHORS
@@ -0,0 +1,24 @@
+Holger Krekel, holger at merlinux eu
+Benjamin Peterson, benjamin at python org
+Ronny Pfannschmidt, Ronny.Pfannschmidt at gmx de
+Guido Wesdorp, johnny at johnnydebris net
+Samuele Pedroni, pedronis at openend se
+Carl Friedrich Bolz, cfbolz at gmx de
+Armin Rigo, arigo at tunes org
+Maciek Fijalkowski, fijal at genesilico pl
+Brian Dorsey, briandorsey at gmail com
+Floris Bruynooghe, flub at devork be
+merlinux GmbH, Germany, office at merlinux eu
+
+Contributors include::
+
+Ross Lawley
+Ralf Schmitt
+Chris Lamb
+Harald Armin Massa
+Martijn Faassen
+Ian Bicking
+Jan Balster
+Grig Gheorghiu
+Bob Ippolito
+Christian Tismer
diff --git a/lib/spack/external/py/LICENSE b/lib/spack/external/py/LICENSE
new file mode 100644
index 0000000000..31ecdfb1db
--- /dev/null
+++ b/lib/spack/external/py/LICENSE
@@ -0,0 +1,19 @@
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
+
diff --git a/lib/spack/external/py/README.rst b/lib/spack/external/py/README.rst
new file mode 100644
index 0000000000..e836b7b50a
--- /dev/null
+++ b/lib/spack/external/py/README.rst
@@ -0,0 +1,21 @@
+.. image:: https://img.shields.io/pypi/pyversions/pytest.svg
+ :target: https://pypi.org/project/py
+.. image:: https://img.shields.io/travis/pytest-dev/py.svg
+ :target: https://travis-ci.org/pytest-dev/py
+
+The py lib is a Python development support library featuring
+the following tools and modules:
+
+* ``py.path``: uniform local and svn path objects
+* ``py.apipkg``: explicit API control and lazy-importing
+* ``py.iniconfig``: easy parsing of .ini files
+* ``py.code``: dynamic code generation and introspection
+
+NOTE: prior to the 1.4 release this distribution used to
+contain py.test which is now its own package, see http://pytest.org
+
+For questions and more information please visit http://pylib.readthedocs.org
+
+Bugs and issues: https://github.com/pytest-dev/py
+
+Authors: Holger Krekel and others, 2004-2016
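A quick sketch of the py.path API listed above; it writes a scratch file under a fresh temporary directory, and the file name is arbitrary.

    import py

    tmp = py.path.local.mkdtemp()       # fresh temporary directory
    target = tmp.join('example.txt')
    target.write('hello')
    assert target.check(file=1)         # exists and is a regular file
    print(target.read())                # -> hello
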
diff --git a/lib/spack/external/py/__init__.py b/lib/spack/external/py/__init__.py
new file mode 100644
index 0000000000..c2273a2e64
--- /dev/null
+++ b/lib/spack/external/py/__init__.py
@@ -0,0 +1,150 @@
+"""
+py.test and pylib: rapid testing and development utils
+
+this module uses apipkg.py for lazy-loading sub modules
+and classes. The initpkg-dictionary below specifies
+name->value mappings where value can be another namespace
+dictionary or an import path.
+
+(c) Holger Krekel and others, 2004-2014
+"""
+__version__ = '1.4.32'
+
+from py import _apipkg
+
+# so that py.error.* instances are picklable
+import sys
+sys.modules['py.error'] = _apipkg.AliasModule("py.error", "py._error", 'error')
+
+_apipkg.initpkg(__name__, attr={'_apipkg': _apipkg}, exportdefs={
+ # access to all standard lib modules
+ 'std': '._std:std',
+ # access to all posix errno's as classes
+ 'error': '._error:error',
+
+ '_pydir' : '.__metainfo:pydir',
+ 'version': 'py:__version__', # backward compatibility
+
+ # pytest-2.0 has a flat namespace, we use alias modules
+ # to keep old references compatible
+ 'test' : 'pytest',
+ 'test.collect' : 'pytest',
+ 'test.cmdline' : 'pytest',
+
+ # hook into the top-level standard library
+ 'process' : {
+ '__doc__' : '._process:__doc__',
+ 'cmdexec' : '._process.cmdexec:cmdexec',
+ 'kill' : '._process.killproc:kill',
+ 'ForkedFunc' : '._process.forkedfunc:ForkedFunc',
+ },
+
+ 'apipkg' : {
+ 'initpkg' : '._apipkg:initpkg',
+ 'ApiModule' : '._apipkg:ApiModule',
+ },
+
+ 'iniconfig' : {
+ 'IniConfig' : '._iniconfig:IniConfig',
+ 'ParseError' : '._iniconfig:ParseError',
+ },
+
+ 'path' : {
+ '__doc__' : '._path:__doc__',
+ 'svnwc' : '._path.svnwc:SvnWCCommandPath',
+ 'svnurl' : '._path.svnurl:SvnCommandPath',
+ 'local' : '._path.local:LocalPath',
+ 'SvnAuth' : '._path.svnwc:SvnAuth',
+ },
+
+ # python inspection/code-generation API
+ 'code' : {
+ '__doc__' : '._code:__doc__',
+ 'compile' : '._code.source:compile_',
+ 'Source' : '._code.source:Source',
+ 'Code' : '._code.code:Code',
+ 'Frame' : '._code.code:Frame',
+ 'ExceptionInfo' : '._code.code:ExceptionInfo',
+ 'Traceback' : '._code.code:Traceback',
+ 'getfslineno' : '._code.source:getfslineno',
+ 'getrawcode' : '._code.code:getrawcode',
+ 'patch_builtins' : '._code.code:patch_builtins',
+ 'unpatch_builtins' : '._code.code:unpatch_builtins',
+ '_AssertionError' : '._code.assertion:AssertionError',
+ '_reinterpret_old' : '._code.assertion:reinterpret_old',
+ '_reinterpret' : '._code.assertion:reinterpret',
+ '_reprcompare' : '._code.assertion:_reprcompare',
+ '_format_explanation' : '._code.assertion:_format_explanation',
+ },
+
+ # backports and additions of builtins
+ 'builtin' : {
+ '__doc__' : '._builtin:__doc__',
+ 'enumerate' : '._builtin:enumerate',
+ 'reversed' : '._builtin:reversed',
+ 'sorted' : '._builtin:sorted',
+ 'any' : '._builtin:any',
+ 'all' : '._builtin:all',
+ 'set' : '._builtin:set',
+ 'frozenset' : '._builtin:frozenset',
+ 'BaseException' : '._builtin:BaseException',
+ 'GeneratorExit' : '._builtin:GeneratorExit',
+ '_sysex' : '._builtin:_sysex',
+ 'print_' : '._builtin:print_',
+ '_reraise' : '._builtin:_reraise',
+ '_tryimport' : '._builtin:_tryimport',
+ 'exec_' : '._builtin:exec_',
+ '_basestring' : '._builtin:_basestring',
+ '_totext' : '._builtin:_totext',
+ '_isbytes' : '._builtin:_isbytes',
+ '_istext' : '._builtin:_istext',
+ '_getimself' : '._builtin:_getimself',
+ '_getfuncdict' : '._builtin:_getfuncdict',
+ '_getcode' : '._builtin:_getcode',
+ 'builtins' : '._builtin:builtins',
+ 'execfile' : '._builtin:execfile',
+ 'callable' : '._builtin:callable',
+ 'bytes' : '._builtin:bytes',
+ 'text' : '._builtin:text',
+ },
+
+ # input-output helping
+ 'io' : {
+ '__doc__' : '._io:__doc__',
+ 'dupfile' : '._io.capture:dupfile',
+ 'TextIO' : '._io.capture:TextIO',
+ 'BytesIO' : '._io.capture:BytesIO',
+ 'FDCapture' : '._io.capture:FDCapture',
+ 'StdCapture' : '._io.capture:StdCapture',
+ 'StdCaptureFD' : '._io.capture:StdCaptureFD',
+ 'TerminalWriter' : '._io.terminalwriter:TerminalWriter',
+ 'ansi_print' : '._io.terminalwriter:ansi_print',
+ 'get_terminal_width' : '._io.terminalwriter:get_terminal_width',
+ 'saferepr' : '._io.saferepr:saferepr',
+ },
+
+ # small and mean xml/html generation
+ 'xml' : {
+ '__doc__' : '._xmlgen:__doc__',
+ 'html' : '._xmlgen:html',
+ 'Tag' : '._xmlgen:Tag',
+ 'raw' : '._xmlgen:raw',
+ 'Namespace' : '._xmlgen:Namespace',
+ 'escape' : '._xmlgen:escape',
+ },
+
+ 'log' : {
+ # logging API ('producers' and 'consumers' connected via keywords)
+ '__doc__' : '._log:__doc__',
+ '_apiwarn' : '._log.warning:_apiwarn',
+ 'Producer' : '._log.log:Producer',
+ 'setconsumer' : '._log.log:setconsumer',
+ '_setstate' : '._log.log:setstate',
+ '_getstate' : '._log.log:getstate',
+ 'Path' : '._log.log:Path',
+ 'STDOUT' : '._log.log:STDOUT',
+ 'STDERR' : '._log.log:STDERR',
+ 'Syslog' : '._log.log:Syslog',
+ },
+
+})
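Given the exportdefs above, submodules are imported lazily on first attribute access; for example (names taken directly from the mapping):

    import py

    py.io.saferepr({'key': 'value' * 50})   # imports py._io.saferepr on demand
    py.builtin.print_('portable print')     # backport listed under 'builtin'
    py.std.textwrap                         # lazy handle on the stdlib module
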
diff --git a/lib/spack/external/py/__metainfo.py b/lib/spack/external/py/__metainfo.py
new file mode 100644
index 0000000000..12581eb7af
--- /dev/null
+++ b/lib/spack/external/py/__metainfo.py
@@ -0,0 +1,2 @@
+import py
+pydir = py.path.local(py.__file__).dirpath()
diff --git a/lib/spack/external/py/_apipkg.py b/lib/spack/external/py/_apipkg.py
new file mode 100644
index 0000000000..a73b8f6d0b
--- /dev/null
+++ b/lib/spack/external/py/_apipkg.py
@@ -0,0 +1,181 @@
+"""
+apipkg: control the exported namespace of a python package.
+
+see http://pypi.python.org/pypi/apipkg
+
+(c) holger krekel, 2009 - MIT license
+"""
+import os
+import sys
+from types import ModuleType
+
+__version__ = '1.3.dev'
+
+def _py_abspath(path):
+ """
+ special version of abspath
+ that will leave paths from jython jars alone
+ """
+ if path.startswith('__pyclasspath__'):
+
+ return path
+ else:
+ return os.path.abspath(path)
+
+def initpkg(pkgname, exportdefs, attr=dict()):
+ """ initialize given package from the export definitions. """
+ oldmod = sys.modules.get(pkgname)
+ d = {}
+ f = getattr(oldmod, '__file__', None)
+ if f:
+ f = _py_abspath(f)
+ d['__file__'] = f
+ if hasattr(oldmod, '__version__'):
+ d['__version__'] = oldmod.__version__
+ if hasattr(oldmod, '__loader__'):
+ d['__loader__'] = oldmod.__loader__
+ if hasattr(oldmod, '__path__'):
+ d['__path__'] = [_py_abspath(p) for p in oldmod.__path__]
+ if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None):
+ d['__doc__'] = oldmod.__doc__
+ d.update(attr)
+ if hasattr(oldmod, "__dict__"):
+ oldmod.__dict__.update(d)
+ mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d)
+ sys.modules[pkgname] = mod
+
+def importobj(modpath, attrname):
+ module = __import__(modpath, None, None, ['__doc__'])
+ if not attrname:
+ return module
+
+ retval = module
+ names = attrname.split(".")
+ for x in names:
+ retval = getattr(retval, x)
+ return retval
+
+class ApiModule(ModuleType):
+ def __docget(self):
+ try:
+ return self.__doc
+ except AttributeError:
+ if '__doc__' in self.__map__:
+ return self.__makeattr('__doc__')
+ def __docset(self, value):
+ self.__doc = value
+ __doc__ = property(__docget, __docset)
+
+ def __init__(self, name, importspec, implprefix=None, attr=None):
+ self.__name__ = name
+ self.__all__ = [x for x in importspec if x != '__onfirstaccess__']
+ self.__map__ = {}
+ self.__implprefix__ = implprefix or name
+ if attr:
+ for name, val in attr.items():
+ # print "setting", self.__name__, name, val
+ setattr(self, name, val)
+ for name, importspec in importspec.items():
+ if isinstance(importspec, dict):
+ subname = '%s.%s' % (self.__name__, name)
+ apimod = ApiModule(subname, importspec, implprefix)
+ sys.modules[subname] = apimod
+ setattr(self, name, apimod)
+ else:
+ parts = importspec.split(':')
+ modpath = parts.pop(0)
+ attrname = parts and parts[0] or ""
+ if modpath[0] == '.':
+ modpath = implprefix + modpath
+
+ if not attrname:
+ subname = '%s.%s' % (self.__name__, name)
+ apimod = AliasModule(subname, modpath)
+ sys.modules[subname] = apimod
+ if '.' not in name:
+ setattr(self, name, apimod)
+ else:
+ self.__map__[name] = (modpath, attrname)
+
+ def __repr__(self):
+ l = []
+ if hasattr(self, '__version__'):
+ l.append("version=" + repr(self.__version__))
+ if hasattr(self, '__file__'):
+ l.append('from ' + repr(self.__file__))
+ if l:
+ return '<ApiModule %r %s>' % (self.__name__, " ".join(l))
+ return '<ApiModule %r>' % (self.__name__,)
+
+ def __makeattr(self, name):
+ """lazily compute value for name or raise AttributeError if unknown."""
+ # print "makeattr", self.__name__, name
+ target = None
+ if '__onfirstaccess__' in self.__map__:
+ target = self.__map__.pop('__onfirstaccess__')
+ importobj(*target)()
+ try:
+ modpath, attrname = self.__map__[name]
+ except KeyError:
+ if target is not None and name != '__onfirstaccess__':
+ # retry, onfirstaccess might have set attrs
+ return getattr(self, name)
+ raise AttributeError(name)
+ else:
+ result = importobj(modpath, attrname)
+ setattr(self, name, result)
+ try:
+ del self.__map__[name]
+ except KeyError:
+ pass # in a recursive-import situation a double-del can happen
+ return result
+
+ __getattr__ = __makeattr
+
+ def __dict__(self):
+ # force all the content of the module to be loaded when __dict__ is read
+ dictdescr = ModuleType.__dict__['__dict__']
+ dict = dictdescr.__get__(self)
+ if dict is not None:
+ hasattr(self, 'some')
+ for name in self.__all__:
+ try:
+ self.__makeattr(name)
+ except AttributeError:
+ pass
+ return dict
+ __dict__ = property(__dict__)
+
+
+def AliasModule(modname, modpath, attrname=None):
+ mod = []
+
+ def getmod():
+ if not mod:
+ x = importobj(modpath, None)
+ if attrname is not None:
+ x = getattr(x, attrname)
+ mod.append(x)
+ return mod[0]
+
+ class AliasModule(ModuleType):
+
+ def __repr__(self):
+ x = modpath
+ if attrname:
+ x += "." + attrname
+ return '<AliasModule %r for %r>' % (modname, x)
+
+ def __getattribute__(self, name):
+ try:
+ return getattr(getmod(), name)
+ except ImportError:
+ return None
+
+ def __setattr__(self, name, value):
+ setattr(getmod(), name, value)
+
+ def __delattr__(self, name):
+ delattr(getmod(), name)
+
+ return AliasModule(str(modname))
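A minimal sketch of initpkg used in a package of your own; mypkg and its _impl module are hypothetical, and each dotted path is resolved only when the corresponding attribute is first accessed.

    # mypkg/__init__.py
    from py import _apipkg

    _apipkg.initpkg(__name__, exportdefs={
        'helper': '._impl:helper',      # lazily resolves mypkg._impl.helper
        'tools': {
            'greet': '._impl:greet',    # nested dict becomes the mypkg.tools namespace
        },
    })
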
diff --git a/lib/spack/external/py/_builtin.py b/lib/spack/external/py/_builtin.py
new file mode 100644
index 0000000000..52ee9d79ca
--- /dev/null
+++ b/lib/spack/external/py/_builtin.py
@@ -0,0 +1,248 @@
+import sys
+
+try:
+ reversed = reversed
+except NameError:
+ def reversed(sequence):
+ """reversed(sequence) -> reverse iterator over values of the sequence
+
+ Return a reverse iterator
+ """
+ if hasattr(sequence, '__reversed__'):
+ return sequence.__reversed__()
+ if not hasattr(sequence, '__getitem__'):
+ raise TypeError("argument to reversed() must be a sequence")
+ return reversed_iterator(sequence)
+
+ class reversed_iterator(object):
+
+ def __init__(self, seq):
+ self.seq = seq
+ self.remaining = len(seq)
+
+ def __iter__(self):
+ return self
+
+ def next(self):
+ i = self.remaining
+ if i > 0:
+ i -= 1
+ item = self.seq[i]
+ self.remaining = i
+ return item
+ raise StopIteration
+
+ def __length_hint__(self):
+ return self.remaining
+
+try:
+ any = any
+except NameError:
+ def any(iterable):
+ for x in iterable:
+ if x:
+ return True
+ return False
+
+try:
+ all = all
+except NameError:
+ def all(iterable):
+ for x in iterable:
+ if not x:
+ return False
+ return True
+
+try:
+ sorted = sorted
+except NameError:
+ builtin_cmp = cmp # need to use cmp as keyword arg
+
+ def sorted(iterable, cmp=None, key=None, reverse=0):
+ use_cmp = None
+ if key is not None:
+ if cmp is None:
+ def use_cmp(x, y):
+ return builtin_cmp(x[0], y[0])
+ else:
+ def use_cmp(x, y):
+ return cmp(x[0], y[0])
+ l = [(key(element), element) for element in iterable]
+ else:
+ if cmp is not None:
+ use_cmp = cmp
+ l = list(iterable)
+ if use_cmp is not None:
+ l.sort(use_cmp)
+ else:
+ l.sort()
+ if reverse:
+ l.reverse()
+ if key is not None:
+ return [element for (_, element) in l]
+ return l
+
+try:
+ set, frozenset = set, frozenset
+except NameError:
+ from sets import set, frozenset
+
+# pass through
+enumerate = enumerate
+
+try:
+ BaseException = BaseException
+except NameError:
+ BaseException = Exception
+
+try:
+ GeneratorExit = GeneratorExit
+except NameError:
+ class GeneratorExit(Exception):
+ """ This exception is never raised, it is there to make it possible to
+ write code compatible with CPython 2.5 even in lower CPython
+ versions."""
+ pass
+ GeneratorExit.__module__ = 'exceptions'
+
+_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit)
+
+try:
+ callable = callable
+except NameError:
+ def callable(obj):
+ return hasattr(obj, "__call__")
+
+if sys.version_info >= (3, 0):
+ exec ("print_ = print ; exec_=exec")
+ import builtins
+
+ # some backward compatibility helpers
+ _basestring = str
+ def _totext(obj, encoding=None, errors=None):
+ if isinstance(obj, bytes):
+ if errors is None:
+ obj = obj.decode(encoding)
+ else:
+ obj = obj.decode(encoding, errors)
+ elif not isinstance(obj, str):
+ obj = str(obj)
+ return obj
+
+ def _isbytes(x):
+ return isinstance(x, bytes)
+ def _istext(x):
+ return isinstance(x, str)
+
+ text = str
+ bytes = bytes
+
+
+ def _getimself(function):
+ return getattr(function, '__self__', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ return getattr(function, "__code__", None)
+
+ def execfile(fn, globs=None, locs=None):
+ if globs is None:
+ back = sys._getframe(1)
+ globs = back.f_globals
+ locs = back.f_locals
+ del back
+ elif locs is None:
+ locs = globs
+ fp = open(fn, "r")
+ try:
+ source = fp.read()
+ finally:
+ fp.close()
+ co = compile(source, fn, "exec", dont_inherit=True)
+ exec_(co, globs, locs)
+
+else:
+ import __builtin__ as builtins
+ _totext = unicode
+ _basestring = basestring
+ text = unicode
+ bytes = str
+ execfile = execfile
+ callable = callable
+ def _isbytes(x):
+ return isinstance(x, str)
+ def _istext(x):
+ return isinstance(x, unicode)
+
+ def _getimself(function):
+ return getattr(function, 'im_self', None)
+
+ def _getfuncdict(function):
+ return getattr(function, "__dict__", None)
+
+ def _getcode(function):
+ try:
+ return getattr(function, "__code__")
+ except AttributeError:
+ return getattr(function, "func_code", None)
+
+ def print_(*args, **kwargs):
+ """ minimal backport of py3k print statement. """
+ sep = ' '
+ if 'sep' in kwargs:
+ sep = kwargs.pop('sep')
+ end = '\n'
+ if 'end' in kwargs:
+ end = kwargs.pop('end')
+ file = 'file' in kwargs and kwargs.pop('file') or sys.stdout
+ if kwargs:
+ args = ", ".join([str(x) for x in kwargs])
+ raise TypeError("invalid keyword arguments: %s" % args)
+ at_start = True
+ for x in args:
+ if not at_start:
+ file.write(sep)
+ file.write(str(x))
+ at_start = False
+ file.write(end)
+
+ def exec_(obj, globals=None, locals=None):
+ """ minimal backport of py3k exec statement. """
+ __tracebackhide__ = True
+ if globals is None:
+ frame = sys._getframe(1)
+ globals = frame.f_globals
+ if locals is None:
+ locals = frame.f_locals
+ elif locals is None:
+ locals = globals
+ exec2(obj, globals, locals)
+
+if sys.version_info >= (3, 0):
+ def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ assert hasattr(val, '__traceback__')
+ raise cls.with_traceback(val, tb)
+else:
+ exec ("""
+def _reraise(cls, val, tb):
+ __tracebackhide__ = True
+ raise cls, val, tb
+def exec2(obj, globals, locals):
+ __tracebackhide__ = True
+ exec obj in globals, locals
+""")
+
+def _tryimport(*names):
+ """ return the first successfully imported module. """
+ assert names
+ for name in names:
+ try:
+ __import__(name)
+ except ImportError:
+ excinfo = sys.exc_info()
+ else:
+ return sys.modules[name]
+ _reraise(*excinfo)
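A short sketch exercising the 2/3 compatibility helpers defined above.

    from py._builtin import print_, exec_, _totext, _tryimport

    print_('a', 'b', sep='-')                  # py3-style print on either version
    namespace = {}
    exec_("x = 2 + 2", namespace)              # py3-style exec
    assert namespace['x'] == 4
    assert _totext(b'bytes in', 'utf-8') == 'bytes in'
    json_mod = _tryimport('simplejson', 'json')   # first importable module wins
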
diff --git a/lib/spack/external/py/_code/__init__.py b/lib/spack/external/py/_code/__init__.py
new file mode 100644
index 0000000000..f15acf8513
--- /dev/null
+++ b/lib/spack/external/py/_code/__init__.py
@@ -0,0 +1 @@
+""" python inspection/code generation API """
diff --git a/lib/spack/external/py/_code/_assertionnew.py b/lib/spack/external/py/_code/_assertionnew.py
new file mode 100644
index 0000000000..afb1b31ff0
--- /dev/null
+++ b/lib/spack/external/py/_code/_assertionnew.py
@@ -0,0 +1,339 @@
+"""
+Find intermediate evaluation results in assert statements through builtin AST.
+This should replace _assertionold.py eventually.
+"""
+
+import sys
+import ast
+
+import py
+from py._code.assertion import _format_explanation, BuiltinAssertionError
+
+
+if sys.platform.startswith("java") and sys.version_info < (2, 5, 2):
+ # See http://bugs.jython.org/issue1497
+ _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict",
+ "ListComp", "GeneratorExp", "Yield", "Compare", "Call",
+ "Repr", "Num", "Str", "Attribute", "Subscript", "Name",
+ "List", "Tuple")
+ _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign",
+ "AugAssign", "Print", "For", "While", "If", "With", "Raise",
+ "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom",
+ "Exec", "Global", "Expr", "Pass", "Break", "Continue")
+ _expr_nodes = set(getattr(ast, name) for name in _exprs)
+ _stmt_nodes = set(getattr(ast, name) for name in _stmts)
+ def _is_ast_expr(node):
+ return node.__class__ in _expr_nodes
+ def _is_ast_stmt(node):
+ return node.__class__ in _stmt_nodes
+else:
+ def _is_ast_expr(node):
+ return isinstance(node, ast.expr)
+ def _is_ast_stmt(node):
+ return isinstance(node, ast.stmt)
+
+
+class Failure(Exception):
+ """Error found while interpreting AST."""
+
+ def __init__(self, explanation=""):
+ self.cause = sys.exc_info()
+ self.explanation = explanation
+
+
+def interpret(source, frame, should_fail=False):
+ mod = ast.parse(source)
+ visitor = DebugInterpreter(frame)
+ try:
+ visitor.visit(mod)
+ except Failure:
+ failure = sys.exc_info()[1]
+ return getfailure(failure)
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --no-assert)")
+
+def run(offending_line, frame=None):
+ if frame is None:
+ frame = py.code.Frame(sys._getframe(1))
+ return interpret(offending_line, frame)
+
+def getfailure(failure):
+ explanation = _format_explanation(failure.explanation)
+ value = failure.cause[1]
+ if str(value):
+ lines = explanation.splitlines()
+ if not lines:
+ lines.append("")
+ lines[0] += " << %s" % (value,)
+ explanation = "\n".join(lines)
+ text = "%s: %s" % (failure.cause[0].__name__, explanation)
+ if text.startswith("AssertionError: assert "):
+ text = text[16:]
+ return text
+
+
+operator_map = {
+ ast.BitOr : "|",
+ ast.BitXor : "^",
+ ast.BitAnd : "&",
+ ast.LShift : "<<",
+ ast.RShift : ">>",
+ ast.Add : "+",
+ ast.Sub : "-",
+ ast.Mult : "*",
+ ast.Div : "/",
+ ast.FloorDiv : "//",
+ ast.Mod : "%",
+ ast.Eq : "==",
+ ast.NotEq : "!=",
+ ast.Lt : "<",
+ ast.LtE : "<=",
+ ast.Gt : ">",
+ ast.GtE : ">=",
+ ast.Pow : "**",
+ ast.Is : "is",
+ ast.IsNot : "is not",
+ ast.In : "in",
+ ast.NotIn : "not in"
+}
+
+unary_map = {
+ ast.Not : "not %s",
+ ast.Invert : "~%s",
+ ast.USub : "-%s",
+ ast.UAdd : "+%s"
+}
+
+
+class DebugInterpreter(ast.NodeVisitor):
+ """Interpret AST nodes to gleam useful debugging information. """
+
+ def __init__(self, frame):
+ self.frame = frame
+
+ def generic_visit(self, node):
+ # Fallback when we don't have a special implementation.
+ if _is_ast_expr(node):
+ mod = ast.Expression(node)
+ co = self._compile(mod)
+ try:
+ result = self.frame.eval(co)
+ except Exception:
+ raise Failure()
+ explanation = self.frame.repr(result)
+ return explanation, result
+ elif _is_ast_stmt(node):
+ mod = ast.Module([node])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co)
+ except Exception:
+ raise Failure()
+ return None, None
+ else:
+ raise AssertionError("can't handle %s" %(node,))
+
+ def _compile(self, source, mode="eval"):
+ return compile(source, "<assertion interpretation>", mode)
+
+ def visit_Expr(self, expr):
+ return self.visit(expr.value)
+
+ def visit_Module(self, mod):
+ for stmt in mod.body:
+ self.visit(stmt)
+
+ def visit_Name(self, name):
+ explanation, result = self.generic_visit(name)
+ # See if the name is local.
+ source = "%r in locals() is not globals()" % (name.id,)
+ co = self._compile(source)
+ try:
+ local = self.frame.eval(co)
+ except Exception:
+ # have to assume it isn't
+ local = False
+ if not local:
+ return name.id, result
+ return explanation, result
+
+ def visit_Compare(self, comp):
+ left = comp.left
+ left_explanation, left_result = self.visit(left)
+ for op, next_op in zip(comp.ops, comp.comparators):
+ next_explanation, next_result = self.visit(next_op)
+ op_symbol = operator_map[op.__class__]
+ explanation = "%s %s %s" % (left_explanation, op_symbol,
+ next_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=next_result)
+ except Exception:
+ raise Failure(explanation)
+ try:
+ if not result:
+ break
+ except KeyboardInterrupt:
+ raise
+ except:
+ break
+ left_explanation, left_result = next_explanation, next_result
+
+ rcomp = py.code._reprcompare
+ if rcomp:
+ res = rcomp(op_symbol, left_result, next_result)
+ if res:
+ explanation = res
+ return explanation, result
+
+ def visit_BoolOp(self, boolop):
+ is_or = isinstance(boolop.op, ast.Or)
+ explanations = []
+ for operand in boolop.values:
+ explanation, result = self.visit(operand)
+ explanations.append(explanation)
+ if result == is_or:
+ break
+ name = is_or and " or " or " and "
+ explanation = "(" + name.join(explanations) + ")"
+ return explanation, result
+
+ def visit_UnaryOp(self, unary):
+ pattern = unary_map[unary.op.__class__]
+ operand_explanation, operand_result = self.visit(unary.operand)
+ explanation = pattern % (operand_explanation,)
+ co = self._compile(pattern % ("__exprinfo_expr",))
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=operand_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_BinOp(self, binop):
+ left_explanation, left_result = self.visit(binop.left)
+ right_explanation, right_result = self.visit(binop.right)
+ symbol = operator_map[binop.op.__class__]
+ explanation = "(%s %s %s)" % (left_explanation, symbol,
+ right_explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % (symbol,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_left=left_result,
+ __exprinfo_right=right_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, result
+
+ def visit_Call(self, call):
+ func_explanation, func = self.visit(call.func)
+ arg_explanations = []
+ ns = {"__exprinfo_func" : func}
+ arguments = []
+ for arg in call.args:
+ arg_explanation, arg_result = self.visit(arg)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ arguments.append(arg_name)
+ arg_explanations.append(arg_explanation)
+ for keyword in call.keywords:
+ arg_explanation, arg_result = self.visit(keyword.value)
+ arg_name = "__exprinfo_%s" % (len(ns),)
+ ns[arg_name] = arg_result
+ keyword_source = "%s=%%s" % (keyword.arg)
+ arguments.append(keyword_source % (arg_name,))
+ arg_explanations.append(keyword_source % (arg_explanation,))
+ if call.starargs:
+ arg_explanation, arg_result = self.visit(call.starargs)
+ arg_name = "__exprinfo_star"
+ ns[arg_name] = arg_result
+ arguments.append("*%s" % (arg_name,))
+ arg_explanations.append("*%s" % (arg_explanation,))
+ if call.kwargs:
+ arg_explanation, arg_result = self.visit(call.kwargs)
+ arg_name = "__exprinfo_kwds"
+ ns[arg_name] = arg_result
+ arguments.append("**%s" % (arg_name,))
+ arg_explanations.append("**%s" % (arg_explanation,))
+ args_explained = ", ".join(arg_explanations)
+ explanation = "%s(%s)" % (func_explanation, args_explained)
+ args = ", ".join(arguments)
+ source = "__exprinfo_func(%s)" % (args,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, **ns)
+ except Exception:
+ raise Failure(explanation)
+ pattern = "%s\n{%s = %s\n}"
+ rep = self.frame.repr(result)
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def _is_builtin_name(self, name):
+ pattern = "%r not in globals() and %r not in locals()"
+ source = pattern % (name.id, name.id)
+ co = self._compile(source)
+ try:
+ return self.frame.eval(co)
+ except Exception:
+ return False
+
+ def visit_Attribute(self, attr):
+ if not isinstance(attr.ctx, ast.Load):
+ return self.generic_visit(attr)
+ source_explanation, source_result = self.visit(attr.value)
+ explanation = "%s.%s" % (source_explanation, attr.attr)
+ source = "__exprinfo_expr.%s" % (attr.attr,)
+ co = self._compile(source)
+ try:
+ result = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ raise Failure(explanation)
+ explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result),
+ self.frame.repr(result),
+ source_explanation, attr.attr)
+ # Check if the attr is from an instance.
+ source = "%r in getattr(__exprinfo_expr, '__dict__', {})"
+ source = source % (attr.attr,)
+ co = self._compile(source)
+ try:
+ from_instance = self.frame.eval(co, __exprinfo_expr=source_result)
+ except Exception:
+ from_instance = True
+ if from_instance:
+ rep = self.frame.repr(result)
+ pattern = "%s\n{%s = %s\n}"
+ explanation = pattern % (rep, rep, explanation)
+ return explanation, result
+
+ def visit_Assert(self, assrt):
+ test_explanation, test_result = self.visit(assrt.test)
+ if test_explanation.startswith("False\n{False =") and \
+ test_explanation.endswith("\n"):
+ test_explanation = test_explanation[15:-2]
+ explanation = "assert %s" % (test_explanation,)
+ if not test_result:
+ try:
+ raise BuiltinAssertionError
+ except Exception:
+ raise Failure(explanation)
+ return explanation, test_result
+
+ def visit_Assign(self, assign):
+ value_explanation, value_result = self.visit(assign.value)
+ explanation = "... = %s" % (value_explanation,)
+ name = ast.Name("__exprinfo_expr", ast.Load(),
+ lineno=assign.value.lineno,
+ col_offset=assign.value.col_offset)
+ new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno,
+ col_offset=assign.col_offset)
+ mod = ast.Module([new_assign])
+ co = self._compile(mod, "exec")
+ try:
+ self.frame.exec_(co, __exprinfo_expr=value_result)
+ except Exception:
+ raise Failure(explanation)
+ return explanation, value_result
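A minimal sketch of how this AST reinterpreter is driven, assuming the
vendored py package is importable; run() captures the caller's frame and
re-evaluates the failing statement piecewise so the explanation carries the
intermediate values:

    from py._code._assertionnew import run

    def demo():
        x, y = 1, 2
        # Re-evaluates "x == y" against demo()'s frame and returns an
        # explanation string along the lines of "assert 1 == 2".
        return run("assert x == y")

    print(demo())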
diff --git a/lib/spack/external/py/_code/_assertionold.py b/lib/spack/external/py/_code/_assertionold.py
new file mode 100644
index 0000000000..4e81fb3ef6
--- /dev/null
+++ b/lib/spack/external/py/_code/_assertionold.py
@@ -0,0 +1,555 @@
+import py
+import sys, inspect
+from compiler import parse, ast, pycodegen
+from py._code.assertion import BuiltinAssertionError, _format_explanation
+
+passthroughex = py.builtin._sysex
+
+class Failure:
+ def __init__(self, node):
+ self.exc, self.value, self.tb = sys.exc_info()
+ self.node = node
+
+class View(object):
+ """View base class.
+
+ If C is a subclass of View, then C(x) creates a proxy object around
+ the object x. The actual class of the proxy is not C in general,
+ but a *subclass* of C determined by the rules below. To avoid confusion
+ we call view class the class of the proxy (a subclass of C, so of View)
+ and object class the class of x.
+
+ Attributes and methods not found in the proxy are automatically read on x.
+ Other operations like setting attributes are performed on the proxy, as
+ determined by its view class. The object x is available from the proxy
+ as its __obj__ attribute.
+
+ The view class selection is determined by the __view__ tuples and the
+ optional __viewkey__ method. By default, the selected view class is the
+ most specific subclass of C whose __view__ mentions the class of x.
+ If no such subclass is found, the search proceeds with the parent
+ object classes. For example, C(True) will first look for a subclass
+ of C with __view__ = (..., bool, ...) and only if it doesn't find any
+ look for one with __view__ = (..., int, ...), and then ..., object,...
+ If everything fails the class C itself is considered to be the default.
+
+ Alternatively, the view class selection can be driven by another aspect
+ of the object x, instead of the class of x, by overriding __viewkey__.
+ See last example at the end of this module.
+ """
+
+ _viewcache = {}
+ __view__ = ()
+
+ def __new__(rootclass, obj, *args, **kwds):
+ self = object.__new__(rootclass)
+ self.__obj__ = obj
+ self.__rootclass__ = rootclass
+ key = self.__viewkey__()
+ try:
+ self.__class__ = self._viewcache[key]
+ except KeyError:
+ self.__class__ = self._selectsubclass(key)
+ return self
+
+ def __getattr__(self, attr):
+ # attributes not found in the normal hierarchy rooted on View
+ # are looked up in the object's real class
+ return getattr(self.__obj__, attr)
+
+ def __viewkey__(self):
+ return self.__obj__.__class__
+
+ def __matchkey__(self, key, subclasses):
+ if inspect.isclass(key):
+ keys = inspect.getmro(key)
+ else:
+ keys = [key]
+ for key in keys:
+ result = [C for C in subclasses if key in C.__view__]
+ if result:
+ return result
+ return []
+
+ def _selectsubclass(self, key):
+ subclasses = list(enumsubclasses(self.__rootclass__))
+ for C in subclasses:
+ if not isinstance(C.__view__, tuple):
+ C.__view__ = (C.__view__,)
+ choices = self.__matchkey__(key, subclasses)
+ if not choices:
+ return self.__rootclass__
+ elif len(choices) == 1:
+ return choices[0]
+ else:
+ # combine the multiple choices
+ return type('?', tuple(choices), {})
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__)
+
+
+def enumsubclasses(cls):
+ for subcls in cls.__subclasses__():
+ for subsubclass in enumsubclasses(subcls):
+ yield subsubclass
+ yield cls
+
+
+class Interpretable(View):
+ """A parse tree node with a few extra methods."""
+ explanation = None
+
+ def is_builtin(self, frame):
+ return False
+
+ def eval(self, frame):
+ # fall-back for unknown expression nodes
+ try:
+ expr = ast.Expression(self.__obj__)
+ expr.filename = '<eval>'
+ self.__obj__.filename = '<eval>'
+ co = pycodegen.ExpressionCodeGenerator(expr).getCode()
+ result = frame.eval(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.result = result
+ self.explanation = self.explanation or frame.repr(self.result)
+
+ def run(self, frame):
+ # fall-back for unknown statement nodes
+ try:
+ expr = ast.Module(None, ast.Stmt([self.__obj__]))
+ expr.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(expr).getCode()
+ frame.exec_(co)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ def nice_explanation(self):
+ return _format_explanation(self.explanation)
+
+
+class Name(Interpretable):
+ __view__ = ast.Name
+
+ def is_local(self, frame):
+ source = '%r in locals() is not globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_global(self, frame):
+ source = '%r in globals()' % self.name
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def is_builtin(self, frame):
+ source = '%r not in locals() and %r not in globals()' % (
+ self.name, self.name)
+ try:
+ return frame.is_true(frame.eval(source))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ super(Name, self).eval(frame)
+ if not self.is_local(frame):
+ self.explanation = self.name
+
+class Compare(Interpretable):
+ __view__ = ast.Compare
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ for operation, expr2 in self.ops:
+ if hasattr(self, 'result'):
+ # shortcutting in chained expressions
+ if not frame.is_true(self.result):
+ break
+ expr2 = Interpretable(expr2)
+ expr2.eval(frame)
+ self.explanation = "%s %s %s" % (
+ expr.explanation, operation, expr2.explanation)
+ source = "__exprinfo_left %s __exprinfo_right" % operation
+ try:
+ self.result = frame.eval(source,
+ __exprinfo_left=expr.result,
+ __exprinfo_right=expr2.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ expr = expr2
+
+class And(Interpretable):
+ __view__ = ast.And
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if not frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' and '.join(explanations) + ')'
+
+class Or(Interpretable):
+ __view__ = ast.Or
+
+ def eval(self, frame):
+ explanations = []
+ for expr in self.nodes:
+ expr = Interpretable(expr)
+ expr.eval(frame)
+ explanations.append(expr.explanation)
+ self.result = expr.result
+ if frame.is_true(expr.result):
+ break
+ self.explanation = '(' + ' or '.join(explanations) + ')'
+
+
+# == Unary operations ==
+keepalive = []
+for astclass, astpattern in {
+ ast.Not : 'not __exprinfo_expr',
+ ast.Invert : '(~__exprinfo_expr)',
+ }.items():
+
+ class UnaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.explanation = astpattern.replace('__exprinfo_expr',
+ expr.explanation)
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(UnaryArith)
+
+# == Binary operations ==
+for astclass, astpattern in {
+ ast.Add : '(__exprinfo_left + __exprinfo_right)',
+ ast.Sub : '(__exprinfo_left - __exprinfo_right)',
+ ast.Mul : '(__exprinfo_left * __exprinfo_right)',
+ ast.Div : '(__exprinfo_left / __exprinfo_right)',
+ ast.Mod : '(__exprinfo_left % __exprinfo_right)',
+ ast.Power : '(__exprinfo_left ** __exprinfo_right)',
+ }.items():
+
+ class BinaryArith(Interpretable):
+ __view__ = astclass
+
+ def eval(self, frame, astpattern=astpattern):
+ left = Interpretable(self.left)
+ left.eval(frame)
+ right = Interpretable(self.right)
+ right.eval(frame)
+ self.explanation = (astpattern
+ .replace('__exprinfo_left', left .explanation)
+ .replace('__exprinfo_right', right.explanation))
+ try:
+ self.result = frame.eval(astpattern,
+ __exprinfo_left=left.result,
+ __exprinfo_right=right.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+ keepalive.append(BinaryArith)
+
+
+class CallFunc(Interpretable):
+ __view__ = ast.CallFunc
+
+ def is_bool(self, frame):
+ source = 'isinstance(__exprinfo_value, bool)'
+ try:
+ return frame.is_true(frame.eval(source,
+ __exprinfo_value=self.result))
+ except passthroughex:
+ raise
+ except:
+ return False
+
+ def eval(self, frame):
+ node = Interpretable(self.node)
+ node.eval(frame)
+ explanations = []
+ vars = {'__exprinfo_fn': node.result}
+ source = '__exprinfo_fn('
+ for a in self.args:
+ if isinstance(a, ast.Keyword):
+ keyword = a.name
+ a = a.expr
+ else:
+ keyword = None
+ a = Interpretable(a)
+ a.eval(frame)
+ argname = '__exprinfo_%d' % len(vars)
+ vars[argname] = a.result
+ if keyword is None:
+ source += argname + ','
+ explanations.append(a.explanation)
+ else:
+ source += '%s=%s,' % (keyword, argname)
+ explanations.append('%s=%s' % (keyword, a.explanation))
+ if self.star_args:
+ star_args = Interpretable(self.star_args)
+ star_args.eval(frame)
+ argname = '__exprinfo_star'
+ vars[argname] = star_args.result
+ source += '*' + argname + ','
+ explanations.append('*' + star_args.explanation)
+ if self.dstar_args:
+ dstar_args = Interpretable(self.dstar_args)
+ dstar_args.eval(frame)
+ argname = '__exprinfo_kwds'
+ vars[argname] = dstar_args.result
+ source += '**' + argname + ','
+ explanations.append('**' + dstar_args.explanation)
+ self.explanation = "%s(%s)" % (
+ node.explanation, ', '.join(explanations))
+ if source.endswith(','):
+ source = source[:-1]
+ source += ')'
+ try:
+ self.result = frame.eval(source, **vars)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ if not node.is_builtin(frame) or not self.is_bool(frame):
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+class Getattr(Interpretable):
+ __view__ = ast.Getattr
+
+ def eval(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ source = '__exprinfo_expr.%s' % self.attrname
+ try:
+ self.result = frame.eval(source, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+ self.explanation = '%s.%s' % (expr.explanation, self.attrname)
+ # if the attribute comes from the instance, its value is interesting
+ source = ('hasattr(__exprinfo_expr, "__dict__") and '
+ '%r in __exprinfo_expr.__dict__' % self.attrname)
+ try:
+ from_instance = frame.is_true(
+ frame.eval(source, __exprinfo_expr=expr.result))
+ except passthroughex:
+ raise
+ except:
+ from_instance = True
+ if from_instance:
+ r = frame.repr(self.result)
+ self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation)
+
+# == Re-interpretation of full statements ==
+
+class Assert(Interpretable):
+ __view__ = ast.Assert
+
+ def run(self, frame):
+ test = Interpretable(self.test)
+ test.eval(frame)
+ # simplify 'assert False where False = ...'
+ if (test.explanation.startswith('False\n{False = ') and
+ test.explanation.endswith('\n}')):
+ test.explanation = test.explanation[15:-2]
+ # print the result as 'assert <explanation>'
+ self.result = test.result
+ self.explanation = 'assert ' + test.explanation
+ if not frame.is_true(test.result):
+ try:
+ raise BuiltinAssertionError
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Assign(Interpretable):
+ __view__ = ast.Assign
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = '... = ' + expr.explanation
+ # fall-back-run the rest of the assignment
+ ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr'))
+ mod = ast.Module(None, ast.Stmt([ass]))
+ mod.filename = '<run>'
+ co = pycodegen.ModuleCodeGenerator(mod).getCode()
+ try:
+ frame.exec_(co, __exprinfo_expr=expr.result)
+ except passthroughex:
+ raise
+ except:
+ raise Failure(self)
+
+class Discard(Interpretable):
+ __view__ = ast.Discard
+
+ def run(self, frame):
+ expr = Interpretable(self.expr)
+ expr.eval(frame)
+ self.result = expr.result
+ self.explanation = expr.explanation
+
+class Stmt(Interpretable):
+ __view__ = ast.Stmt
+
+ def run(self, frame):
+ for stmt in self.nodes:
+ stmt = Interpretable(stmt)
+ stmt.run(frame)
+
+
+def report_failure(e):
+ explanation = e.node.nice_explanation()
+ if explanation:
+ explanation = ", in: " + explanation
+ else:
+ explanation = ""
+ sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation))
+
+def check(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ expr = parse(s, 'eval')
+ assert isinstance(expr, ast.Expression)
+ node = Interpretable(expr.node)
+ try:
+ node.eval(frame)
+ except passthroughex:
+ raise
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+ else:
+ if not frame.is_true(node.result):
+ sys.stderr.write("assertion failed: %s\n" % node.nice_explanation())
+
+
+###########################################################
+# API / Entry points
+# #########################################################
+
+def interpret(source, frame, should_fail=False):
+ module = Interpretable(parse(source, 'exec').node)
+ #print "got module", module
+ if isinstance(frame, py.std.types.FrameType):
+ frame = py.code.Frame(frame)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ return getfailure(e)
+ except passthroughex:
+ raise
+ except:
+ import traceback
+ traceback.print_exc()
+ if should_fail:
+ return ("(assertion failed, but when it was re-run for "
+ "printing intermediate values, it did not fail. Suggestions: "
+ "compute assert expression before the assert or use --nomagic)")
+ else:
+ return None
+
+def getmsg(excinfo):
+ if isinstance(excinfo, tuple):
+ excinfo = py.code.ExceptionInfo(excinfo)
+ #frame, line = gettbline(tb)
+ #frame = py.code.Frame(frame)
+ #return interpret(line, frame)
+
+ tb = excinfo.traceback[-1]
+ source = str(tb.statement).strip()
+ x = interpret(source, tb.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ return x
+
+def getfailure(e):
+ explanation = e.node.nice_explanation()
+ if str(e.value):
+ lines = explanation.split('\n')
+ lines[0] += " << %s" % (e.value,)
+ explanation = '\n'.join(lines)
+ text = "%s: %s" % (e.exc.__name__, explanation)
+ if text.startswith('AssertionError: assert '):
+ text = text[16:]
+ return text
+
+def run(s, frame=None):
+ if frame is None:
+ frame = sys._getframe(1)
+ frame = py.code.Frame(frame)
+ module = Interpretable(parse(s, 'exec').node)
+ try:
+ module.run(frame)
+ except Failure:
+ e = sys.exc_info()[1]
+ report_failure(e)
+
+
+if __name__ == '__main__':
+ # example:
+ def f():
+ return 5
+ def g():
+ return 3
+ def h(x):
+ return 'never'
+ check("f() * g() == 5")
+ check("not f()")
+ check("not (f() and g() or 0)")
+ check("f() == g()")
+ i = 4
+ check("i == f()")
+ check("len(f()) == 0")
+ check("isinstance(2+3+4, float)")
+
+ run("x = i")
+ check("x == 5")
+
+ run("assert not f(), 'oops'")
+ run("a, b, c = 1, 2")
+ run("a, b, c = f()")
+
+ check("max([f(),g()]) == 4")
+ check("'hello'[g()] == 'h'")
+ run("'guk%d' % h(f())")
diff --git a/lib/spack/external/py/_code/_py2traceback.py b/lib/spack/external/py/_code/_py2traceback.py
new file mode 100644
index 0000000000..d65e27cb73
--- /dev/null
+++ b/lib/spack/external/py/_code/_py2traceback.py
@@ -0,0 +1,79 @@
+# copied from python-2.7.3's traceback.py
+# CHANGES:
+# - some_str is replaced, trying to create unicode strings
+#
+import types
+
+def format_exception_only(etype, value):
+ """Format the exception part of a traceback.
+
+ The arguments are the exception type and value such as given by
+ sys.last_type and sys.last_value. The return value is a list of
+ strings, each ending in a newline.
+
+ Normally, the list contains a single string; however, for
+ SyntaxError exceptions, it contains several lines that (when
+ printed) display detailed information about where the syntax
+ error occurred.
+
+ The message indicating which exception occurred is always the last
+ string in the list.
+
+ """
+
+ # An instance should not have a meaningful value parameter, but
+ # sometimes does, particularly for string exceptions, such as
+ # >>> raise string1, string2 # deprecated
+ #
+ # Clear these out first because issubclass(string1, SyntaxError)
+ # would throw another exception and mask the original problem.
+ if (isinstance(etype, BaseException) or
+ isinstance(etype, types.InstanceType) or
+ etype is None or type(etype) is str):
+ return [_format_final_exc_line(etype, value)]
+
+ stype = etype.__name__
+
+ if not issubclass(etype, SyntaxError):
+ return [_format_final_exc_line(stype, value)]
+
+ # It was a syntax error; show exactly where the problem was found.
+ lines = []
+ try:
+ msg, (filename, lineno, offset, badline) = value.args
+ except Exception:
+ pass
+ else:
+ filename = filename or "<string>"
+ lines.append(' File "%s", line %d\n' % (filename, lineno))
+ if badline is not None:
+ lines.append(' %s\n' % badline.strip())
+ if offset is not None:
+ caretspace = badline.rstrip('\n')[:offset].lstrip()
+ # non-space whitespace (likes tabs) must be kept for alignment
+ caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+ # only three spaces to account for offset1 == pos 0
+ lines.append(' %s^\n' % ''.join(caretspace))
+ value = msg
+
+ lines.append(_format_final_exc_line(stype, value))
+ return lines
+
+def _format_final_exc_line(etype, value):
+ """Return a list of a single line -- normal case for format_exception_only"""
+ valuestr = _some_str(value)
+ if value is None or not valuestr:
+ line = "%s\n" % etype
+ else:
+ line = "%s: %s\n" % (etype, valuestr)
+ return line
+
+def _some_str(value):
+ try:
+ return unicode(value)
+ except Exception:
+ try:
+ return str(value)
+ except Exception:
+ pass
+ return '<unprintable %s object>' % type(value).__name__
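A minimal sketch of this Python 2 fallback (on Python 3, code.py below uses
the stdlib traceback.format_exception_only instead), assuming the vendored
py package is importable and a Python 2 interpreter:

    import sys
    from py._code._py2traceback import format_exception_only

    try:
        int("not a number")
    except ValueError:
        etype, value, _ = sys.exc_info()
        # Returns newline-terminated strings; normally a single line such as
        # "ValueError: invalid literal for int() with base 10: 'not a number'"
        for line in format_exception_only(etype, value):
            sys.stdout.write(line)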
diff --git a/lib/spack/external/py/_code/assertion.py b/lib/spack/external/py/_code/assertion.py
new file mode 100644
index 0000000000..4ce80c75b1
--- /dev/null
+++ b/lib/spack/external/py/_code/assertion.py
@@ -0,0 +1,94 @@
+import sys
+import py
+
+BuiltinAssertionError = py.builtin.builtins.AssertionError
+
+_reprcompare = None # if set, will be called by assert reinterp for comparison ops
+
+def _format_explanation(explanation):
+ """This formats an explanation
+
+ Normally all embedded newlines are escaped, however there are
+ three exceptions: \n{, \n} and \n~. The first two are intended
+ cover nested explanations, see function and attribute explanations
+ for examples (.visit_Call(), visit_Attribute()). The last one is
+ for when one explanation needs to span multiple lines, e.g. when
+ displaying diffs.
+ """
+ raw_lines = (explanation or '').split('\n')
+ # escape newlines not followed by {, } and ~
+ lines = [raw_lines[0]]
+ for l in raw_lines[1:]:
+ if l.startswith('{') or l.startswith('}') or l.startswith('~'):
+ lines.append(l)
+ else:
+ lines[-1] += '\\n' + l
+
+ result = lines[:1]
+ stack = [0]
+ stackcnt = [0]
+ for line in lines[1:]:
+ if line.startswith('{'):
+ if stackcnt[-1]:
+ s = 'and '
+ else:
+ s = 'where '
+ stack.append(len(result))
+ stackcnt[-1] += 1
+ stackcnt.append(0)
+ result.append(' +' + ' '*(len(stack)-1) + s + line[1:])
+ elif line.startswith('}'):
+ assert line.startswith('}')
+ stack.pop()
+ stackcnt.pop()
+ result[stack[-1]] += line[1:]
+ else:
+ assert line.startswith('~')
+ result.append(' '*len(stack) + line[1:])
+ assert len(stack) == 1
+ return '\n'.join(result)
+
+
+class AssertionError(BuiltinAssertionError):
+ def __init__(self, *args):
+ BuiltinAssertionError.__init__(self, *args)
+ if args:
+ try:
+ self.msg = str(args[0])
+ except py.builtin._sysex:
+ raise
+ except:
+ self.msg = "<[broken __repr__] %s at %0xd>" %(
+ args[0].__class__, id(args[0]))
+ else:
+ f = py.code.Frame(sys._getframe(1))
+ try:
+ source = f.code.fullsource
+ if source is not None:
+ try:
+ source = source.getstatement(f.lineno, assertion=True)
+ except IndexError:
+ source = None
+ else:
+ source = str(source.deindent()).strip()
+ except py.error.ENOENT:
+ source = None
+ # this can also occur during reinterpretation, when the
+ # co_filename is set to "<run>".
+ if source:
+ self.msg = reinterpret(source, f, should_fail=True)
+ else:
+ self.msg = "<could not determine information>"
+ if not self.args:
+ self.args = (self.msg,)
+
+if sys.version_info > (3, 0):
+ AssertionError.__module__ = "builtins"
+ reinterpret_old = "old reinterpretation not available for py3"
+else:
+ from py._code._assertionold import interpret as reinterpret_old
+if sys.version_info >= (2, 6) or (sys.platform.startswith("java")):
+ from py._code._assertionnew import interpret as reinterpret
+else:
+ reinterpret = reinterpret_old
+
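A minimal sketch of the nested-explanation format that _format_explanation()
handles, assuming the vendored py package is importable; the raw string here
is illustrative, and the \n{ ... \n} pair becomes an indented "where" line:

    from py._code.assertion import _format_explanation

    raw = "False\n{False = isdigit('abc')\n}"
    print(_format_explanation(raw))
    # Prints:
    # False
    #  + where False = isdigit('abc')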
diff --git a/lib/spack/external/py/_code/code.py b/lib/spack/external/py/_code/code.py
new file mode 100644
index 0000000000..f14c562a29
--- /dev/null
+++ b/lib/spack/external/py/_code/code.py
@@ -0,0 +1,787 @@
+import py
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if sys.version_info[0] >= 3:
+ from traceback import format_exception_only
+else:
+ from py._code._py2traceback import format_exception_only
+
+class Code(object):
+ """ wrapper around Python code objects """
+ def __init__(self, rawcode):
+ if not hasattr(rawcode, "co_filename"):
+ rawcode = py.code.getrawcode(rawcode)
+ try:
+ self.filename = rawcode.co_filename
+ self.firstlineno = rawcode.co_firstlineno - 1
+ self.name = rawcode.co_name
+ except AttributeError:
+ raise TypeError("not a code object: %r" %(rawcode,))
+ self.raw = rawcode
+
+ def __eq__(self, other):
+ return self.raw == other.raw
+
+ def __ne__(self, other):
+ return not self == other
+
+ @property
+ def path(self):
+ """ return a path object pointing to source code (note that it
+ might not point to an actually existing file). """
+ p = py.path.local(self.raw.co_filename)
+ # maybe don't try this checking
+ if not p.check():
+ # XXX maybe try harder like the weird logic
+ # in the standard lib [linecache.updatecache] does?
+ p = self.raw.co_filename
+ return p
+
+ @property
+ def fullsource(self):
+ """ return a py.code.Source object for the full source file of the code
+ """
+ from py._code import source
+ full, _ = source.findsource(self.raw)
+ return full
+
+ def source(self):
+ """ return a py.code.Source object for the code object's source only
+ """
+ # return source only for that part of code
+ return py.code.Source(self.raw)
+
+ def getargs(self, var=False):
+ """ return a tuple with the argument names for the code object
+
+ if 'var' is set True also return the names of the variable and
+ keyword arguments when present
+ """
+ # handy shortcut for getting args
+ raw = self.raw
+ argcount = raw.co_argcount
+ if var:
+ argcount += raw.co_flags & CO_VARARGS
+ argcount += raw.co_flags & CO_VARKEYWORDS
+ return raw.co_varnames[:argcount]
+
+class Frame(object):
+ """Wrapper around a Python frame holding f_locals and f_globals
+ in which expressions can be evaluated."""
+
+ def __init__(self, frame):
+ self.lineno = frame.f_lineno - 1
+ self.f_globals = frame.f_globals
+ self.f_locals = frame.f_locals
+ self.raw = frame
+ self.code = py.code.Code(frame.f_code)
+
+ @property
+ def statement(self):
+ """ statement this frame is at """
+ if self.code.fullsource is None:
+ return py.code.Source("")
+ return self.code.fullsource.getstatement(self.lineno)
+
+ def eval(self, code, **vars):
+ """ evaluate 'code' in the frame
+
+ 'vars' are optional additional local variables
+
+ returns the result of the evaluation
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ return eval(code, self.f_globals, f_locals)
+
+ def exec_(self, code, **vars):
+ """ exec 'code' in the frame
+
+ 'vars' are optional additional local variables
+ """
+ f_locals = self.f_locals.copy()
+ f_locals.update(vars)
+ py.builtin.exec_(code, self.f_globals, f_locals )
+
+ def repr(self, object):
+ """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+ """
+ return py.io.saferepr(object)
+
+ def is_true(self, object):
+ return object
+
+ def getargs(self, var=False):
+ """ return a list of tuples (name, value) for all arguments
+
+ if 'var' is set True also include the variable and keyword
+ arguments when present
+ """
+ retval = []
+ for arg in self.code.getargs(var):
+ try:
+ retval.append((arg, self.f_locals[arg]))
+ except KeyError:
+ pass # this can occur when using Psyco
+ return retval
+
+class TracebackEntry(object):
+ """ a single entry in a traceback """
+
+ _repr_style = None
+ exprinfo = None
+
+ def __init__(self, rawentry):
+ self._rawentry = rawentry
+ self.lineno = rawentry.tb_lineno - 1
+
+ def set_repr_style(self, mode):
+ assert mode in ("short", "long")
+ self._repr_style = mode
+
+ @property
+ def frame(self):
+ return py.code.Frame(self._rawentry.tb_frame)
+
+ @property
+ def relline(self):
+ return self.lineno - self.frame.code.firstlineno
+
+ def __repr__(self):
+ return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
+
+ @property
+ def statement(self):
+ """ py.code.Source object for the current statement """
+ source = self.frame.code.fullsource
+ return source.getstatement(self.lineno)
+
+ @property
+ def path(self):
+ """ path to the source code """
+ return self.frame.code.path
+
+ def getlocals(self):
+ return self.frame.f_locals
+ locals = property(getlocals, None, None, "locals of underlying frame")
+
+ def reinterpret(self):
+ """Reinterpret the failing statement and returns a detailed information
+ about what operations are performed."""
+ if self.exprinfo is None:
+ source = str(self.statement).strip()
+ x = py.code._reinterpret(source, self.frame, should_fail=True)
+ if not isinstance(x, str):
+ raise TypeError("interpret returned non-string %r" % (x,))
+ self.exprinfo = x
+ return self.exprinfo
+
+ def getfirstlinesource(self):
+ # on Jython this firstlineno can be -1 apparently
+ return max(self.frame.code.firstlineno, 0)
+
+ def getsource(self, astcache=None):
+ """ return failing source code. """
+ # we use the passed in astcache to not reparse asttrees
+ # within exception info printing
+ from py._code.source import getstatementrange_ast
+ source = self.frame.code.fullsource
+ if source is None:
+ return None
+ key = astnode = None
+ if astcache is not None:
+ key = self.frame.code.path
+ if key is not None:
+ astnode = astcache.get(key, None)
+ start = self.getfirstlinesource()
+ try:
+ astnode, _, end = getstatementrange_ast(self.lineno, source,
+ astnode=astnode)
+ except SyntaxError:
+ end = self.lineno + 1
+ else:
+ if key is not None:
+ astcache[key] = astnode
+ return source[start:end]
+
+ source = property(getsource)
+
+ def ishidden(self):
+ """ return True if the current frame has a var __tracebackhide__
+ resolving to True
+
+ mostly for internal use
+ """
+ try:
+ return self.frame.f_locals['__tracebackhide__']
+ except KeyError:
+ try:
+ return self.frame.f_globals['__tracebackhide__']
+ except KeyError:
+ return False
+
+ def __str__(self):
+ try:
+ fn = str(self.path)
+ except py.error.Error:
+ fn = '???'
+ name = self.frame.code.name
+ try:
+ line = str(self.statement).lstrip()
+ except KeyboardInterrupt:
+ raise
+ except:
+ line = "???"
+ return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line)
+
+ def name(self):
+ return self.frame.code.raw.co_name
+ name = property(name, None, None, "co_name of underlying code")
+
+class Traceback(list):
+ """ Traceback objects encapsulate and offer higher level
+ access to Traceback entries.
+ """
+ Entry = TracebackEntry
+ def __init__(self, tb):
+ """ initialize from given python traceback object. """
+ if hasattr(tb, 'tb_next'):
+ def f(cur):
+ while cur is not None:
+ yield self.Entry(cur)
+ cur = cur.tb_next
+ list.__init__(self, f(tb))
+ else:
+ list.__init__(self, tb)
+
+ def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+ """ return a Traceback instance wrapping part of this Traceback
+
+ by providing any combination of path, lineno and firstlineno, the
+ first frame to start the to-be-returned traceback is determined
+
+ this allows cutting the first part of a Traceback instance e.g.
+ for formatting reasons (removing some uninteresting bits that deal
+ with handling of the exception/traceback)
+ """
+ for x in self:
+ code = x.frame.code
+ codepath = code.path
+ if ((path is None or codepath == path) and
+ (excludepath is None or not hasattr(codepath, 'relto') or
+ not codepath.relto(excludepath)) and
+ (lineno is None or x.lineno == lineno) and
+ (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+ return Traceback(x._rawentry)
+ return self
+
+ def __getitem__(self, key):
+ val = super(Traceback, self).__getitem__(key)
+ if isinstance(key, type(slice(0))):
+ val = self.__class__(val)
+ return val
+
+ def filter(self, fn=lambda x: not x.ishidden()):
+ """ return a Traceback instance with certain items removed
+
+ fn is a function that gets a single argument, a TracebackItem
+ instance, and should return True when the item should be added
+ to the Traceback, False when not
+
+ by default this removes all the TracebackItems which are hidden
+ (see ishidden() above)
+ """
+ return Traceback(filter(fn, self))
+
+ def getcrashentry(self):
+ """ return last non-hidden traceback entry that lead
+ to the exception of a traceback.
+ """
+ for i in range(-1, -len(self)-1, -1):
+ entry = self[i]
+ if not entry.ishidden():
+ return entry
+ return self[-1]
+
+ def recursionindex(self):
+ """ return the index of the frame/TracebackItem where recursion
+ originates if appropriate, None if no recursion occurred
+ """
+ cache = {}
+ for i, entry in enumerate(self):
+ # id for the code.raw is needed to work around
+ # the strange metaprogramming in the decorator lib from pypi
+ # which generates code objects that have hash/value equality
+ #XXX needs a test
+ key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+ #print "checking for recursion at", key
+ l = cache.setdefault(key, [])
+ if l:
+ f = entry.frame
+ loc = f.f_locals
+ for otherloc in l:
+ if f.is_true(f.eval(co_equal,
+ __recursioncache_locals_1=loc,
+ __recursioncache_locals_2=otherloc)):
+ return i
+ l.append(entry.frame.f_locals)
+ return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+ '?', 'eval')
+
+class ExceptionInfo(object):
+ """ wraps sys.exc_info() objects and offers
+ help for navigating the traceback.
+ """
+ _striptext = ''
+ def __init__(self, tup=None, exprinfo=None):
+ if tup is None:
+ tup = sys.exc_info()
+ if exprinfo is None and isinstance(tup[1], AssertionError):
+ exprinfo = getattr(tup[1], 'msg', None)
+ if exprinfo is None:
+ exprinfo = str(tup[1])
+ if exprinfo and exprinfo.startswith('assert '):
+ self._striptext = 'AssertionError: '
+ self._excinfo = tup
+ #: the exception class
+ self.type = tup[0]
+ #: the exception instance
+ self.value = tup[1]
+ #: the exception raw traceback
+ self.tb = tup[2]
+ #: the exception type name
+ self.typename = self.type.__name__
+ #: the exception traceback (py.code.Traceback instance)
+ self.traceback = py.code.Traceback(self.tb)
+
+ def __repr__(self):
+ return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback))
+
+ def exconly(self, tryshort=False):
+ """ return the exception as a string
+
+ when 'tryshort' resolves to True, and the exception is a
+ py.code._AssertionError, only the actual exception part of
+ the exception representation is returned (so 'AssertionError: ' is
+ removed from the beginning)
+ """
+ lines = format_exception_only(self.type, self.value)
+ text = ''.join(lines)
+ text = text.rstrip()
+ if tryshort:
+ if text.startswith(self._striptext):
+ text = text[len(self._striptext):]
+ return text
+
+ def errisinstance(self, exc):
+ """ return True if the exception is an instance of exc """
+ return isinstance(self.value, exc)
+
+ def _getreprcrash(self):
+ exconly = self.exconly(tryshort=True)
+ entry = self.traceback.getcrashentry()
+ path, lineno = entry.frame.code.raw.co_filename, entry.lineno
+ return ReprFileLocation(path, lineno+1, exconly)
+
+ def getrepr(self, showlocals=False, style="long",
+ abspath=False, tbfilter=True, funcargs=False):
+ """ return str()able representation of this exception info.
+ showlocals: show locals per traceback entry
+ style: long|short|no|native traceback style
+ tbfilter: hide entries (where __tracebackhide__ is true)
+
+ in case of style==native, tbfilter and showlocals are ignored.
+ """
+ if style == 'native':
+ return ReprExceptionInfo(ReprTracebackNative(
+ py.std.traceback.format_exception(
+ self.type,
+ self.value,
+ self.traceback[0]._rawentry,
+ )), self._getreprcrash())
+
+ fmt = FormattedExcinfo(showlocals=showlocals, style=style,
+ abspath=abspath, tbfilter=tbfilter, funcargs=funcargs)
+ return fmt.repr_excinfo(self)
+
+ def __str__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return str(loc)
+
+ def __unicode__(self):
+ entry = self.traceback[-1]
+ loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly())
+ return unicode(loc)
+
+
+class FormattedExcinfo(object):
+ """ presenting information about failing Functions and Generators. """
+ # for traceback entries
+ flow_marker = ">"
+ fail_marker = "E"
+
+ def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False):
+ self.showlocals = showlocals
+ self.style = style
+ self.tbfilter = tbfilter
+ self.funcargs = funcargs
+ self.abspath = abspath
+ self.astcache = {}
+
+ def _getindent(self, source):
+ # figure out indent for given source
+ try:
+ s = str(source.getstatement(len(source)-1))
+ except KeyboardInterrupt:
+ raise
+ except:
+ try:
+ s = str(source[-1])
+ except KeyboardInterrupt:
+ raise
+ except:
+ return 0
+ return 4 + (len(s) - len(s.lstrip()))
+
+ def _getentrysource(self, entry):
+ source = entry.getsource(self.astcache)
+ if source is not None:
+ source = source.deindent()
+ return source
+
+ def _saferepr(self, obj):
+ return py.io.saferepr(obj)
+
+ def repr_args(self, entry):
+ if self.funcargs:
+ args = []
+ for argname, argvalue in entry.frame.getargs(var=True):
+ args.append((argname, self._saferepr(argvalue)))
+ return ReprFuncArgs(args)
+
+ def get_source(self, source, line_index=-1, excinfo=None, short=False):
+ """ return formatted and marked up source lines. """
+ lines = []
+ if source is None or line_index >= len(source.lines):
+ source = py.code.Source("???")
+ line_index = 0
+ if line_index < 0:
+ line_index += len(source)
+ space_prefix = " "
+ if short:
+ lines.append(space_prefix + source.lines[line_index].strip())
+ else:
+ for line in source.lines[:line_index]:
+ lines.append(space_prefix + line)
+ lines.append(self.flow_marker + " " + source.lines[line_index])
+ for line in source.lines[line_index+1:]:
+ lines.append(space_prefix + line)
+ if excinfo is not None:
+ indent = 4 if short else self._getindent(source)
+ lines.extend(self.get_exconly(excinfo, indent=indent, markall=True))
+ return lines
+
+ def get_exconly(self, excinfo, indent=4, markall=False):
+ lines = []
+ indent = " " * indent
+ # get the real exception information out
+ exlines = excinfo.exconly(tryshort=True).split('\n')
+ failindent = self.fail_marker + indent[1:]
+ for line in exlines:
+ lines.append(failindent + line)
+ if not markall:
+ failindent = indent
+ return lines
+
+ def repr_locals(self, locals):
+ if self.showlocals:
+ lines = []
+ keys = [loc for loc in locals if loc[0] != "@"]
+ keys.sort()
+ for name in keys:
+ value = locals[name]
+ if name == '__builtins__':
+ lines.append("__builtins__ = <builtins>")
+ else:
+ # This formatting could all be handled by the
+ # _repr() function, which is only reprlib.Repr in
+ # disguise, so is very configurable.
+ str_repr = self._saferepr(value)
+ #if len(str_repr) < 70 or not isinstance(value,
+ # (list, tuple, dict)):
+ lines.append("%-10s = %s" %(name, str_repr))
+ #else:
+ # self._line("%-10s =\\" % (name,))
+ # # XXX
+ # py.std.pprint.pprint(value, stream=self.excinfowriter)
+ return ReprLocals(lines)
+
+ def repr_traceback_entry(self, entry, excinfo=None):
+ source = self._getentrysource(entry)
+ if source is None:
+ source = py.code.Source("???")
+ line_index = 0
+ else:
+ # entry.getfirstlinesource() can be -1, should be 0 on jython
+ line_index = entry.lineno - max(entry.getfirstlinesource(), 0)
+
+ lines = []
+ style = entry._repr_style
+ if style is None:
+ style = self.style
+ if style in ("short", "long"):
+ short = style == "short"
+ reprargs = self.repr_args(entry) if not short else None
+ s = self.get_source(source, line_index, excinfo, short=short)
+ lines.extend(s)
+ if short:
+ message = "in %s" %(entry.name)
+ else:
+ message = excinfo and excinfo.typename or ""
+ path = self._makepath(entry.path)
+ filelocrepr = ReprFileLocation(path, entry.lineno+1, message)
+ localsrepr = None
+ if not short:
+ localsrepr = self.repr_locals(entry.locals)
+ return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style)
+ if excinfo:
+ lines.extend(self.get_exconly(excinfo, indent=4))
+ return ReprEntry(lines, None, None, None, style)
+
+ def _makepath(self, path):
+ if not self.abspath:
+ try:
+ np = py.path.local().bestrelpath(path)
+ except OSError:
+ return path
+ if len(np) < len(str(path)):
+ path = np
+ return path
+
+ def repr_traceback(self, excinfo):
+ traceback = excinfo.traceback
+ if self.tbfilter:
+ traceback = traceback.filter()
+ recursionindex = None
+ if excinfo.errisinstance(RuntimeError):
+ if "maximum recursion depth exceeded" in str(excinfo.value):
+ recursionindex = traceback.recursionindex()
+ last = traceback[-1]
+ entries = []
+ extraline = None
+ for index, entry in enumerate(traceback):
+ einfo = (last == entry) and excinfo or None
+ reprentry = self.repr_traceback_entry(entry, einfo)
+ entries.append(reprentry)
+ if index == recursionindex:
+ extraline = "!!! Recursion detected (same locals & position)"
+ break
+ return ReprTraceback(entries, extraline, style=self.style)
+
+ def repr_excinfo(self, excinfo):
+ reprtraceback = self.repr_traceback(excinfo)
+ reprcrash = excinfo._getreprcrash()
+ return ReprExceptionInfo(reprtraceback, reprcrash)
+
+class TerminalRepr:
+ def __str__(self):
+ s = self.__unicode__()
+ if sys.version_info[0] < 3:
+ s = s.encode('utf-8')
+ return s
+
+ def __unicode__(self):
+ # FYI this is called from pytest-xdist's serialization of exception
+ # information.
+ io = py.io.TextIO()
+ tw = py.io.TerminalWriter(file=io)
+ self.toterminal(tw)
+ return io.getvalue().strip()
+
+ def __repr__(self):
+ return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ReprExceptionInfo(TerminalRepr):
+ def __init__(self, reprtraceback, reprcrash):
+ self.reprtraceback = reprtraceback
+ self.reprcrash = reprcrash
+ self.sections = []
+
+ def addsection(self, name, content, sep="-"):
+ self.sections.append((name, content, sep))
+
+ def toterminal(self, tw):
+ self.reprtraceback.toterminal(tw)
+ for name, content, sep in self.sections:
+ tw.sep(sep, name)
+ tw.line(content)
+
+class ReprTraceback(TerminalRepr):
+ entrysep = "_ "
+
+ def __init__(self, reprentries, extraline, style):
+ self.reprentries = reprentries
+ self.extraline = extraline
+ self.style = style
+
+ def toterminal(self, tw):
+ # the entries might have different styles
+ last_style = None
+ for i, entry in enumerate(self.reprentries):
+ if entry.style == "long":
+ tw.line("")
+ entry.toterminal(tw)
+ if i < len(self.reprentries) - 1:
+ next_entry = self.reprentries[i+1]
+ if entry.style == "long" or \
+ entry.style == "short" and next_entry.style == "long":
+ tw.sep(self.entrysep)
+
+ if self.extraline:
+ tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+ def __init__(self, tblines):
+ self.style = "native"
+ self.reprentries = [ReprEntryNative(tblines)]
+ self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+ style = "native"
+
+ def __init__(self, tblines):
+ self.lines = tblines
+
+ def toterminal(self, tw):
+ tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+ localssep = "_ "
+
+ def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+ self.lines = lines
+ self.reprfuncargs = reprfuncargs
+ self.reprlocals = reprlocals
+ self.reprfileloc = filelocrepr
+ self.style = style
+
+ def toterminal(self, tw):
+ if self.style == "short":
+ self.reprfileloc.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ #tw.line("")
+ return
+ if self.reprfuncargs:
+ self.reprfuncargs.toterminal(tw)
+ for line in self.lines:
+ red = line.startswith("E ")
+ tw.line(line, bold=True, red=red)
+ if self.reprlocals:
+ #tw.sep(self.localssep, "Locals")
+ tw.line("")
+ self.reprlocals.toterminal(tw)
+ if self.reprfileloc:
+ if self.lines:
+ tw.line("")
+ self.reprfileloc.toterminal(tw)
+
+ def __str__(self):
+ return "%s\n%s\n%s" % ("\n".join(self.lines),
+ self.reprlocals,
+ self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+ def __init__(self, path, lineno, message):
+ self.path = str(path)
+ self.lineno = lineno
+ self.message = message
+
+ def toterminal(self, tw):
+ # filename and lineno output for each entry,
+ # using an output format that most editors understand
+ msg = self.message
+ i = msg.find("\n")
+ if i != -1:
+ msg = msg[:i]
+ tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+ def __init__(self, lines):
+ self.lines = lines
+
+ def toterminal(self, tw):
+ for line in self.lines:
+ tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+ def __init__(self, args):
+ self.args = args
+
+ def toterminal(self, tw):
+ if self.args:
+ linesofar = ""
+ for name, value in self.args:
+ ns = "%s = %s" %(name, value)
+ if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+ if linesofar:
+ tw.line(linesofar)
+ linesofar = ns
+ else:
+ if linesofar:
+ linesofar += ", " + ns
+ else:
+ linesofar = ns
+ if linesofar:
+ tw.line(linesofar)
+ tw.line("")
+
+
+
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+ """ put compile and AssertionError builtins to Python's builtins. """
+ if assertion:
+ from py._code import assertion
+ l = oldbuiltins.setdefault('AssertionError', [])
+ l.append(py.builtin.builtins.AssertionError)
+ py.builtin.builtins.AssertionError = assertion.AssertionError
+ if compile:
+ l = oldbuiltins.setdefault('compile', [])
+ l.append(py.builtin.builtins.compile)
+ py.builtin.builtins.compile = py.code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+ """ remove compile and AssertionError builtins from Python builtins. """
+ if assertion:
+ py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+ if compile:
+ py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
+def getrawcode(obj, trycall=True):
+ """ return code object for given function. """
+ try:
+ return obj.__code__
+ except AttributeError:
+ obj = getattr(obj, 'im_func', obj)
+ obj = getattr(obj, 'func_code', obj)
+ obj = getattr(obj, 'f_code', obj)
+ obj = getattr(obj, '__code__', obj)
+ if trycall and not hasattr(obj, 'co_firstlineno'):
+ if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+ x = getrawcode(obj.__call__, trycall=False)
+ if hasattr(x, 'co_firstlineno'):
+ return x
+ return obj
+
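A minimal usage sketch for the wrappers above, assuming the vendored py
package is importable: ExceptionInfo captures the current sys.exc_info() and
getrepr() renders a pytest-style traceback.

    import py

    def boom():
        raise ValueError("bad input")

    try:
        boom()
    except ValueError:
        excinfo = py.code.ExceptionInfo()      # wraps the current sys.exc_info()
        print(excinfo.typename)                # ValueError
        print(excinfo.exconly())               # ValueError: bad input
        print(excinfo.getrepr(style="short"))  # formatted traceback entries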
diff --git a/lib/spack/external/py/_code/source.py b/lib/spack/external/py/_code/source.py
new file mode 100644
index 0000000000..c8b668b2fb
--- /dev/null
+++ b/lib/spack/external/py/_code/source.py
@@ -0,0 +1,411 @@
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+from types import ModuleType
+cpy_compile = compile
+
+try:
+ import _ast
+ from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+ _AST_FLAG = 0
+ _ast = None
+
+
+class Source(object):
+ """ a immutable object holding a source code fragment,
+ possibly deindenting it.
+ """
+ _compilecounter = 0
+ def __init__(self, *parts, **kwargs):
+ self.lines = lines = []
+ de = kwargs.get('deindent', True)
+ rstrip = kwargs.get('rstrip', True)
+ for part in parts:
+ if not part:
+ partlines = []
+ if isinstance(part, Source):
+ partlines = part.lines
+ elif isinstance(part, (tuple, list)):
+ partlines = [x.rstrip("\n") for x in part]
+ elif isinstance(part, py.builtin._basestring):
+ partlines = part.split('\n')
+ if rstrip:
+ while partlines:
+ if partlines[-1].strip():
+ break
+ partlines.pop()
+ else:
+ partlines = getsource(part, deindent=de).lines
+ if de:
+ partlines = deindent(partlines)
+ lines.extend(partlines)
+
+ def __eq__(self, other):
+ try:
+ return self.lines == other.lines
+ except AttributeError:
+ if isinstance(other, str):
+ return str(self) == other
+ return False
+
+ def __getitem__(self, key):
+ if isinstance(key, int):
+ return self.lines[key]
+ else:
+ if key.step not in (None, 1):
+ raise IndexError("cannot slice a Source with a step")
+ return self.__getslice__(key.start, key.stop)
+
+ def __len__(self):
+ return len(self.lines)
+
+ def __getslice__(self, start, end):
+ newsource = Source()
+ newsource.lines = self.lines[start:end]
+ return newsource
+
+ def strip(self):
+ """ return new source object with trailing
+ and leading blank lines removed.
+ """
+ start, end = 0, len(self)
+ while start < end and not self.lines[start].strip():
+ start += 1
+ while end > start and not self.lines[end-1].strip():
+ end -= 1
+ source = Source()
+ source.lines[:] = self.lines[start:end]
+ return source
+
+ def putaround(self, before='', after='', indent=' ' * 4):
+ """ return a copy of the source object with
+ 'before' and 'after' wrapped around it.
+ """
+ before = Source(before)
+ after = Source(after)
+ newsource = Source()
+ lines = [ (indent + line) for line in self.lines]
+ newsource.lines = before.lines + lines + after.lines
+ return newsource
+
+ def indent(self, indent=' ' * 4):
+ """ return a copy of the source object with
+ all lines indented by the given indent-string.
+ """
+ newsource = Source()
+ newsource.lines = [(indent+line) for line in self.lines]
+ return newsource
+
+ def getstatement(self, lineno, assertion=False):
+ """ return Source statement which contains the
+ given line number (counted from 0).
+ """
+ start, end = self.getstatementrange(lineno, assertion)
+ return self[start:end]
+
+ def getstatementrange(self, lineno, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region that contains the given lineno.
+ """
+ if not (0 <= lineno < len(self)):
+ raise IndexError("lineno out of range")
+ ast, start, end = getstatementrange_ast(lineno, self)
+ return start, end
+
+ def deindent(self, offset=None):
+ """ return a new source object deindented by offset.
+ If offset is None then guess an indentation offset from
+ the first non-blank line. Subsequent lines which have a
+ lower indentation offset will be copied verbatim as
+ they are assumed to be part of multilines.
+ """
+ # XXX maybe use the tokenizer to properly handle multiline
+ # strings etc.pp?
+ newsource = Source()
+ newsource.lines[:] = deindent(self.lines, offset)
+ return newsource
+
+ def isparseable(self, deindent=True):
+ """ return True if source is parseable, heuristically
+ deindenting it by default.
+ """
+ try:
+ import parser
+ except ImportError:
+ syntax_checker = lambda x: compile(x, 'asd', 'exec')
+ else:
+ syntax_checker = parser.suite
+
+ if deindent:
+ source = str(self.deindent())
+ else:
+ source = str(self)
+ try:
+ #compile(source+'\n', "x", "exec")
+ syntax_checker(source+'\n')
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return False
+ else:
+ return True
+
+ def __str__(self):
+ return "\n".join(self.lines)
+
+ def compile(self, filename=None, mode='exec',
+ flag=generators.compiler_flag,
+ dont_inherit=0, _genframe=None):
+ """ return compiled code object. if filename is None
+ invent an artificial filename which displays
+ the source/line position of the caller frame.
+ """
+ if not filename or py.path.local(filename).check(file=0):
+ if _genframe is None:
+ _genframe = sys._getframe(1) # the caller
+ fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno
+ base = "<%d-codegen " % self._compilecounter
+ self.__class__._compilecounter += 1
+ if not filename:
+ filename = base + '%s:%d>' % (fn, lineno)
+ else:
+ filename = base + '%r %s:%d>' % (filename, fn, lineno)
+ source = "\n".join(self.lines) + '\n'
+ try:
+ co = cpy_compile(source, filename, mode, flag)
+ except SyntaxError:
+ ex = sys.exc_info()[1]
+ # re-represent syntax errors from parsing python strings
+ msglines = self.lines[:ex.lineno]
+ if ex.offset:
+ msglines.append(" "*ex.offset + '^')
+ msglines.append("(code was compiled probably from here: %s)" % filename)
+ newex = SyntaxError('\n'.join(msglines))
+ newex.offset = ex.offset
+ newex.lineno = ex.lineno
+ newex.text = ex.text
+ raise newex
+ else:
+ if flag & _AST_FLAG:
+ return co
+ lines = [(x + "\n") for x in self.lines]
+ py.std.linecache.cache[filename] = (1, None, lines, filename)
+ return co
+
+#
+# public API shortcut functions
+#
+
+def compile_(source, filename=None, mode='exec', flags=
+ generators.compiler_flag, dont_inherit=0):
+ """ compile the given source to a raw code object,
+ and maintain an internal cache which allows later
+ retrieval of the source code for the code object
+ and any recursively created code objects.
+ """
+ if _ast is not None and isinstance(source, _ast.AST):
+ # XXX should Source support having AST?
+ return cpy_compile(source, filename, mode, flags, dont_inherit)
+ _genframe = sys._getframe(1) # the caller
+ s = Source(source)
+ co = s.compile(filename, mode, flags, _genframe=_genframe)
+ return co
+
+
+def getfslineno(obj):
+ """ Return source location (path, lineno) for the given object.
+ If the source cannot be determined return ("", -1)
+ """
+ try:
+ code = py.code.Code(obj)
+ except TypeError:
+ try:
+ fn = (py.std.inspect.getsourcefile(obj) or
+ py.std.inspect.getfile(obj))
+ except TypeError:
+ return "", -1
+
+ fspath = fn and py.path.local(fn) or None
+ lineno = -1
+ if fspath:
+ try:
+ _, lineno = findsource(obj)
+ except IOError:
+ pass
+ else:
+ fspath = code.path
+ lineno = code.firstlineno
+ assert isinstance(lineno, int)
+ return fspath, lineno
+
+#
+# helper functions
+#
+
+def findsource(obj):
+ try:
+ sourcelines, lineno = py.std.inspect.findsource(obj)
+ except py.builtin._sysex:
+ raise
+ except:
+ return None, -1
+ source = Source()
+ source.lines = [line.rstrip() for line in sourcelines]
+ return source, lineno
+
+def getsource(obj, **kwargs):
+ obj = py.code.getrawcode(obj)
+ try:
+ strsrc = inspect.getsource(obj)
+ except IndentationError:
+ strsrc = "\"Buggy python version consider upgrading, cannot get source\""
+ assert isinstance(strsrc, str)
+ return Source(strsrc, **kwargs)
+
+def deindent(lines, offset=None):
+ if offset is None:
+ for line in lines:
+ line = line.expandtabs()
+ s = line.lstrip()
+ if s:
+ offset = len(line)-len(s)
+ break
+ else:
+ offset = 0
+ if offset == 0:
+ return list(lines)
+ newlines = []
+ def readline_generator(lines):
+ for line in lines:
+ yield line + '\n'
+ while True:
+ yield ''
+
+ it = readline_generator(lines)
+
+ try:
+ for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)):
+ if sline > len(lines):
+ break # End of input reached
+ if sline > len(newlines):
+ line = lines[sline - 1].expandtabs()
+ if line.lstrip() and line[:offset].isspace():
+ line = line[offset:] # Deindent
+ newlines.append(line)
+
+ for i in range(sline, eline):
+ # Don't deindent continuing lines of
+ # multiline tokens (i.e. multiline strings)
+ newlines.append(lines[i])
+ except (IndentationError, tokenize.TokenError):
+ pass
+ # Add any lines we didn't see. E.g. if an exception was raised.
+ newlines.extend(lines[len(newlines):])
+ return newlines
+
+
+def get_statement_startend2(lineno, node):
+ import ast
+ # flatten all statements and except handlers into one lineno-list
+ # AST's line numbers start indexing at 1
+ l = []
+ for x in ast.walk(node):
+ if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+ l.append(x.lineno - 1)
+ for name in "finalbody", "orelse":
+ val = getattr(x, name, None)
+ if val:
+ # treat the finally/orelse part as its own statement
+ l.append(val[0].lineno - 1 - 1)
+ l.sort()
+ insert_index = bisect_right(l, lineno)
+ start = l[insert_index - 1]
+ if insert_index >= len(l):
+ end = None
+ else:
+ end = l[insert_index]
+ return start, end
+
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+ if astnode is None:
+ content = str(source)
+ if sys.version_info < (2,7):
+ content += "\n"
+ try:
+ astnode = compile(content, "source", "exec", 1024) # 1024 for AST
+ except ValueError:
+ start, end = getstatementrange_old(lineno, source, assertion)
+ return None, start, end
+ start, end = get_statement_startend2(lineno, astnode)
+ # we need to correct the end:
+ # - ast-parsing strips comments
+ # - there might be empty lines
+ # - we might have lesser indented code blocks at the end
+ if end is None:
+ end = len(source.lines)
+
+ if end > start + 1:
+ # make sure we don't span differently indented code blocks
+ # by using the BlockFinder helper that inspect.getsource() itself uses
+ block_finder = inspect.BlockFinder()
+ # if we start with an indented line, put blockfinder to "started" mode
+ block_finder.started = source.lines[start][0].isspace()
+ it = ((x + "\n") for x in source.lines[start:end])
+ try:
+ for tok in tokenize.generate_tokens(lambda: next(it)):
+ block_finder.tokeneater(*tok)
+ except (inspect.EndOfBlock, IndentationError):
+ end = block_finder.last + start
+ except Exception:
+ pass
+
+ # the end might still point to a comment or empty line, correct it
+ while end:
+ line = source.lines[end - 1].lstrip()
+ if line.startswith("#") or not line:
+ end -= 1
+ else:
+ break
+ return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+ """ return (start, end) tuple which spans the minimal
+ statement region which containing the given lineno.
+ raise an IndexError if no such statementrange can be found.
+ """
+ # XXX this logic is only used on python2.4 and below
+ # 1. find the start of the statement
+ from codeop import compile_command
+ for start in range(lineno, -1, -1):
+ if assertion:
+ line = source.lines[start]
+ # the following lines are not fully tested, change with care
+ if 'super' in line and 'self' in line and '__init__' in line:
+ raise IndexError("likely a subclass")
+ if "assert" not in line and "raise" not in line:
+ continue
+ trylines = source.lines[start:lineno+1]
+ # quick hack to prepare parsing an indented line with
+ # compile_command() (which errors on "return" outside defs)
+ trylines.insert(0, 'def xxx():')
+ trysource = '\n '.join(trylines)
+ # ^ space here
+ try:
+ compile_command(trysource)
+ except (SyntaxError, OverflowError, ValueError):
+ continue
+
+ # 2. find the end of the statement
+ for end in range(lineno+1, len(source)+1):
+ trysource = source[start:end]
+ if trysource.isparseable():
+ return start, end
+ raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
diff --git a/lib/spack/external/py/_error.py b/lib/spack/external/py/_error.py
new file mode 100644
index 0000000000..8ca339beba
--- /dev/null
+++ b/lib/spack/external/py/_error.py
@@ -0,0 +1,89 @@
+"""
+create errno-specific classes for IO or os calls.
+
+"""
+import sys, os, errno
+
+class Error(EnvironmentError):
+ def __repr__(self):
+ return "%s.%s %r: %s " %(self.__class__.__module__,
+ self.__class__.__name__,
+ self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ #repr(self.args)
+ )
+
+ def __str__(self):
+ s = "[%s]: %s" %(self.__class__.__doc__,
+ " ".join(map(str, self.args)),
+ )
+ return s
+
+_winerrnomap = {
+ 2: errno.ENOENT,
+ 3: errno.ENOENT,
+ 17: errno.EEXIST,
+ 18: errno.EXDEV,
+ 13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
+ 22: errno.ENOTDIR,
+ 20: errno.ENOTDIR,
+ 267: errno.ENOTDIR,
+ 5: errno.EACCES, # anything better?
+}
+
+class ErrorMaker(object):
+ """ lazily provides Exception classes for each possible POSIX errno
+ (as defined per the 'errno' module). All such instances
+ subclass EnvironmentError.
+ """
+ Error = Error
+ _errno2class = {}
+
+ def __getattr__(self, name):
+ if name[0] == "_":
+ raise AttributeError(name)
+ eno = getattr(errno, name)
+ cls = self._geterrnoclass(eno)
+ setattr(self, name, cls)
+ return cls
+
+ def _geterrnoclass(self, eno):
+ try:
+ return self._errno2class[eno]
+ except KeyError:
+ clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
+ errorcls = type(Error)(clsname, (Error,),
+ {'__module__':'py.error',
+ '__doc__': os.strerror(eno)})
+ self._errno2class[eno] = errorcls
+ return errorcls
+
+ def checked_call(self, func, *args, **kwargs):
+ """ call a function and raise an errno-exception if applicable. """
+ __tracebackhide__ = True
+ try:
+ return func(*args, **kwargs)
+ except self.Error:
+ raise
+ except (OSError, EnvironmentError):
+ cls, value, tb = sys.exc_info()
+ if not hasattr(value, 'errno'):
+ raise
+ __tracebackhide__ = False
+ errno = value.errno
+ try:
+ if not isinstance(value, WindowsError):
+ raise NameError
+ except NameError:
+ # we are not on Windows, or we got a proper OSError
+ cls = self._geterrnoclass(errno)
+ else:
+ try:
+ cls = self._geterrnoclass(_winerrnomap[errno])
+ except KeyError:
+ raise value
+ raise cls("%s%r" % (func.__name__, args))
+ __tracebackhide__ = True
+
+
+error = ErrorMaker()
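A quick sketch of how the ErrorMaker above is meant to be used (assuming it is exposed as py.error upstream, which is a statement about the package layout, not part of this file): attribute access builds an errno-specific class on demand, and checked_call converts a raw OSError into it::

    import os
    import py

    print(py.error.ENOENT)                              # class built lazily from errno.ENOENT
    try:
        py.error.checked_call(os.stat, "/no/such/path")
    except py.error.ENOENT as exc:
        print("missing:", exc)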
diff --git a/lib/spack/external/py/_iniconfig.py b/lib/spack/external/py/_iniconfig.py
new file mode 100644
index 0000000000..92b50bd853
--- /dev/null
+++ b/lib/spack/external/py/_iniconfig.py
@@ -0,0 +1,162 @@
+""" brain-dead simple parser for ini-style files.
+(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed
+"""
+__version__ = "0.2.dev2"
+
+__all__ = ['IniConfig', 'ParseError']
+
+COMMENTCHARS = "#;"
+
+class ParseError(Exception):
+ def __init__(self, path, lineno, msg):
+ Exception.__init__(self, path, lineno, msg)
+ self.path = path
+ self.lineno = lineno
+ self.msg = msg
+
+ def __str__(self):
+ return "%s:%s: %s" %(self.path, self.lineno+1, self.msg)
+
+class SectionWrapper(object):
+ def __init__(self, config, name):
+ self.config = config
+ self.name = name
+
+ def lineof(self, name):
+ return self.config.lineof(self.name, name)
+
+ def get(self, key, default=None, convert=str):
+ return self.config.get(self.name, key, convert=convert, default=default)
+
+ def __getitem__(self, key):
+ return self.config.sections[self.name][key]
+
+ def __iter__(self):
+ section = self.config.sections.get(self.name, [])
+ def lineof(key):
+ return self.config.lineof(self.name, key)
+ for name in sorted(section, key=lineof):
+ yield name
+
+ def items(self):
+ for name in self:
+ yield name, self[name]
+
+
+class IniConfig(object):
+ def __init__(self, path, data=None):
+ self.path = str(path) # convenience
+ if data is None:
+ f = open(self.path)
+ try:
+ tokens = self._parse(iter(f))
+ finally:
+ f.close()
+ else:
+ tokens = self._parse(data.splitlines(True))
+
+ self._sources = {}
+ self.sections = {}
+
+ for lineno, section, name, value in tokens:
+ if section is None:
+ self._raise(lineno, 'no section header defined')
+ self._sources[section, name] = lineno
+ if name is None:
+ if section in self.sections:
+ self._raise(lineno, 'duplicate section %r'%(section, ))
+ self.sections[section] = {}
+ else:
+ if name in self.sections[section]:
+ self._raise(lineno, 'duplicate name %r'%(name, ))
+ self.sections[section][name] = value
+
+ def _raise(self, lineno, msg):
+ raise ParseError(self.path, lineno, msg)
+
+ def _parse(self, line_iter):
+ result = []
+ section = None
+ for lineno, line in enumerate(line_iter):
+ name, data = self._parseline(line, lineno)
+ # new value
+ if name is not None and data is not None:
+ result.append((lineno, section, name, data))
+ # new section
+ elif name is not None and data is None:
+ if not name:
+ self._raise(lineno, 'empty section name')
+ section = name
+ result.append((lineno, section, None, None))
+ # continuation
+ elif name is None and data is not None:
+ if not result:
+ self._raise(lineno, 'unexpected value continuation')
+ last = result.pop()
+ last_name, last_data = last[-2:]
+ if last_name is None:
+ self._raise(lineno, 'unexpected value continuation')
+
+ if last_data:
+ data = '%s\n%s' % (last_data, data)
+ result.append(last[:-1] + (data,))
+ return result
+
+ def _parseline(self, line, lineno):
+ # blank lines
+ if iscommentline(line):
+ line = ""
+ else:
+ line = line.rstrip()
+ if not line:
+ return None, None
+ # section
+ if line[0] == '[':
+ realline = line
+ for c in COMMENTCHARS:
+ line = line.split(c)[0].rstrip()
+ if line[-1] == "]":
+ return line[1:-1], None
+ return None, realline.strip()
+ # value
+ elif not line[0].isspace():
+ try:
+ name, value = line.split('=', 1)
+ if ":" in name:
+ raise ValueError()
+ except ValueError:
+ try:
+ name, value = line.split(":", 1)
+ except ValueError:
+ self._raise(lineno, 'unexpected line: %r' % line)
+ return name.strip(), value.strip()
+ # continuation
+ else:
+ return None, line.strip()
+
+ def lineof(self, section, name=None):
+ lineno = self._sources.get((section, name))
+ if lineno is not None:
+ return lineno + 1
+
+ def get(self, section, name, default=None, convert=str):
+ try:
+ return convert(self.sections[section][name])
+ except KeyError:
+ return default
+
+ def __getitem__(self, name):
+ if name not in self.sections:
+ raise KeyError(name)
+ return SectionWrapper(self, name)
+
+ def __iter__(self):
+ for name in sorted(self.sections, key=self.lineof):
+ yield SectionWrapper(self, name)
+
+ def __contains__(self, arg):
+ return arg in self.sections
+
+def iscommentline(line):
+ c = line.lstrip()[:1]
+ return c in COMMENTCHARS
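To exercise the parser without touching the filesystem, IniConfig also accepts the file content through the data argument; the path is then only used in error messages. A minimal sketch importing straight from the vendored module::

    from py._iniconfig import IniConfig

    ini = IniConfig("example.ini",
                    data="[server]\nhost = localhost\nport = 8080\n")
    print(ini["server"]["host"])                    # 'localhost'
    print(ini.get("server", "port", convert=int))   # 8080
    print(ini.lineof("server", "port"))             # 3 (1-based)
    print("client" in ini)                          # False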
diff --git a/lib/spack/external/py/_io/__init__.py b/lib/spack/external/py/_io/__init__.py
new file mode 100644
index 0000000000..835f01f3ab
--- /dev/null
+++ b/lib/spack/external/py/_io/__init__.py
@@ -0,0 +1 @@
+""" input/output helping """
diff --git a/lib/spack/external/py/_io/capture.py b/lib/spack/external/py/_io/capture.py
new file mode 100644
index 0000000000..bc157ed978
--- /dev/null
+++ b/lib/spack/external/py/_io/capture.py
@@ -0,0 +1,371 @@
+import os
+import sys
+import py
+import tempfile
+
+try:
+ from io import StringIO
+except ImportError:
+ from StringIO import StringIO
+
+if sys.version_info < (3,0):
+ class TextIO(StringIO):
+ def write(self, data):
+ if not isinstance(data, unicode):
+ data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
+ StringIO.write(self, data)
+else:
+ TextIO = StringIO
+
+try:
+ from io import BytesIO
+except ImportError:
+ class BytesIO(StringIO):
+ def write(self, data):
+ if isinstance(data, unicode):
+ raise TypeError("not a byte value: %r" %(data,))
+ StringIO.write(self, data)
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+class FDCapture:
+ """ Capture IO to/from a given os-level filedescriptor. """
+
+ def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
+ """ save targetfd descriptor, and open a new
+ temporary file there. If no tmpfile is
+ specified a tempfile.Tempfile() will be opened
+ in text mode.
+ """
+ self.targetfd = targetfd
+ if tmpfile is None and targetfd != 0:
+ f = tempfile.TemporaryFile('wb+')
+ tmpfile = dupfile(f, encoding="UTF-8")
+ f.close()
+ self.tmpfile = tmpfile
+ self._savefd = os.dup(self.targetfd)
+ if patchsys:
+ self._oldsys = getattr(sys, patchsysdict[targetfd])
+ if now:
+ self.start()
+
+ def start(self):
+ try:
+ os.fstat(self._savefd)
+ except OSError:
+ raise ValueError("saved filedescriptor not valid, "
+ "did you call start() twice?")
+ if self.targetfd == 0 and not self.tmpfile:
+ fd = os.open(devnullpath, os.O_RDONLY)
+ os.dup2(fd, 0)
+ os.close(fd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], DontReadFromInput())
+ else:
+ os.dup2(self.tmpfile.fileno(), self.targetfd)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self.tmpfile)
+
+ def done(self):
+ """ unpatch and clean up, returns the self.tmpfile (file object)
+ """
+ os.dup2(self._savefd, self.targetfd)
+ os.close(self._savefd)
+ if self.targetfd != 0:
+ self.tmpfile.seek(0)
+ if hasattr(self, '_oldsys'):
+ setattr(sys, patchsysdict[self.targetfd], self._oldsys)
+ return self.tmpfile
+
+ def writeorg(self, data):
+ """ write a string to the original file descriptor
+ """
+ tempfp = tempfile.TemporaryFile()
+ try:
+ os.dup2(self._savefd, tempfp.fileno())
+ tempfp.write(data)
+ finally:
+ tempfp.close()
+
+
+def dupfile(f, mode=None, buffering=0, raising=False, encoding=None):
+ """ return a new open file object that's a duplicate of f
+
+ mode is duplicated if not given; 'buffering' controls
+ the buffer size (defaulting to no buffering) and 'raising'
+ defines whether an exception is raised when an incompatible
+ file object is passed in (if raising is False, the file
+ object itself will be returned).
+ """
+ try:
+ fd = f.fileno()
+ mode = mode or f.mode
+ except AttributeError:
+ if raising:
+ raise
+ return f
+ newfd = os.dup(fd)
+ if sys.version_info >= (3,0):
+ if encoding is not None:
+ mode = mode.replace("b", "")
+ buffering = True
+ return os.fdopen(newfd, mode, buffering, encoding, closefd=True)
+ else:
+ f = os.fdopen(newfd, mode, buffering)
+ if encoding is not None:
+ return EncodedFile(f, encoding)
+ return f
+
+class EncodedFile(object):
+ def __init__(self, _stream, encoding):
+ self._stream = _stream
+ self.encoding = encoding
+
+ def write(self, obj):
+ if isinstance(obj, unicode):
+ obj = obj.encode(self.encoding)
+ elif isinstance(obj, str):
+ pass
+ else:
+ obj = str(obj)
+ self._stream.write(obj)
+
+ def writelines(self, linelist):
+ data = ''.join(linelist)
+ self.write(data)
+
+ def __getattr__(self, name):
+ return getattr(self._stream, name)
+
+class Capture(object):
+ def call(cls, func, *args, **kwargs):
+ """ return a (res, out, err) tuple where
+ out and err represent the output/error output
+ during function execution.
+ call the given function with args/kwargs
+ and capture output/error during its execution.
+ """
+ so = cls()
+ try:
+ res = func(*args, **kwargs)
+ finally:
+ out, err = so.reset()
+ return res, out, err
+ call = classmethod(call)
+
+ def reset(self):
+ """ reset sys.stdout/stderr and return captured output as strings. """
+ if hasattr(self, '_reset'):
+ raise ValueError("was already reset")
+ self._reset = True
+ outfile, errfile = self.done(save=False)
+ out, err = "", ""
+ if outfile and not outfile.closed:
+ out = outfile.read()
+ outfile.close()
+ if errfile and errfile != outfile and not errfile.closed:
+ err = errfile.read()
+ errfile.close()
+ return out, err
+
+ def suspend(self):
+ """ return current snapshot captures, memorize tempfiles. """
+ outerr = self.readouterr()
+ outfile, errfile = self.done()
+ return outerr
+
+
+class StdCaptureFD(Capture):
+ """ This class allows to capture writes to FD1 and FD2
+ and may connect a NULL file to FD0 (and prevent
+ reads from sys.stdin). If any of the 0,1,2 file descriptors
+ is invalid it will not be captured.
+ """
+ def __init__(self, out=True, err=True, mixed=False,
+ in_=True, patchsys=True, now=True):
+ self._options = {
+ "out": out,
+ "err": err,
+ "mixed": mixed,
+ "in_": in_,
+ "patchsys": patchsys,
+ "now": now,
+ }
+ self._save()
+ if now:
+ self.startall()
+
+ def _save(self):
+ in_ = self._options['in_']
+ out = self._options['out']
+ err = self._options['err']
+ mixed = self._options['mixed']
+ patchsys = self._options['patchsys']
+ if in_:
+ try:
+ self.in_ = FDCapture(0, tmpfile=None, now=False,
+ patchsys=patchsys)
+ except OSError:
+ pass
+ if out:
+ tmpfile = None
+ if hasattr(out, 'write'):
+ tmpfile = out
+ try:
+ self.out = FDCapture(1, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['out'] = self.out.tmpfile
+ except OSError:
+ pass
+ if err:
+ if out and mixed:
+ tmpfile = self.out.tmpfile
+ elif hasattr(err, 'write'):
+ tmpfile = err
+ else:
+ tmpfile = None
+ try:
+ self.err = FDCapture(2, tmpfile=tmpfile,
+ now=False, patchsys=patchsys)
+ self._options['err'] = self.err.tmpfile
+ except OSError:
+ pass
+
+ def startall(self):
+ if hasattr(self, 'in_'):
+ self.in_.start()
+ if hasattr(self, 'out'):
+ self.out.start()
+ if hasattr(self, 'err'):
+ self.err.start()
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if hasattr(self, 'out') and not self.out.tmpfile.closed:
+ outfile = self.out.done()
+ if hasattr(self, 'err') and not self.err.tmpfile.closed:
+ errfile = self.err.done()
+ if hasattr(self, 'in_'):
+ tmpfile = self.in_.done()
+ if save:
+ self._save()
+ return outfile, errfile
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ if hasattr(self, "out"):
+ out = self._readsnapshot(self.out.tmpfile)
+ else:
+ out = ""
+ if hasattr(self, "err"):
+ err = self._readsnapshot(self.err.tmpfile)
+ else:
+ err = ""
+ return [out, err]
+
+ def _readsnapshot(self, f):
+ f.seek(0)
+ res = f.read()
+ enc = getattr(f, "encoding", None)
+ if enc:
+ res = py.builtin._totext(res, enc, "replace")
+ f.truncate(0)
+ f.seek(0)
+ return res
+
+
+class StdCapture(Capture):
+ """ This class allows to capture writes to sys.stdout|stderr "in-memory"
+ and will raise errors on tries to read from sys.stdin. It only
+ modifies sys.stdout|stderr|stdin attributes and does not
+ touch underlying File Descriptors (use StdCaptureFD for that).
+ """
+ def __init__(self, out=True, err=True, in_=True, mixed=False, now=True):
+ self._oldout = sys.stdout
+ self._olderr = sys.stderr
+ self._oldin = sys.stdin
+ if out and not hasattr(out, 'file'):
+ out = TextIO()
+ self.out = out
+ if err:
+ if mixed:
+ err = out
+ elif not hasattr(err, 'write'):
+ err = TextIO()
+ self.err = err
+ self.in_ = in_
+ if now:
+ self.startall()
+
+ def startall(self):
+ if self.out:
+ sys.stdout = self.out
+ if self.err:
+ sys.stderr = self.err
+ if self.in_:
+ sys.stdin = self.in_ = DontReadFromInput()
+
+ def done(self, save=True):
+ """ return (outfile, errfile) and stop capturing. """
+ outfile = errfile = None
+ if self.out and not self.out.closed:
+ sys.stdout = self._oldout
+ outfile = self.out
+ outfile.seek(0)
+ if self.err and not self.err.closed:
+ sys.stderr = self._olderr
+ errfile = self.err
+ errfile.seek(0)
+ if self.in_:
+ sys.stdin = self._oldin
+ return outfile, errfile
+
+ def resume(self):
+ """ resume capturing with original temp files. """
+ self.startall()
+
+ def readouterr(self):
+ """ return snapshot value of stdout/stderr capturings. """
+ out = err = ""
+ if self.out:
+ out = self.out.getvalue()
+ self.out.truncate(0)
+ self.out.seek(0)
+ if self.err:
+ err = self.err.getvalue()
+ self.err.truncate(0)
+ self.err.seek(0)
+ return out, err
+
+class DontReadFromInput:
+ """Temporary stub class. Ideally when stdin is accessed, the
+ capturing should be turned off, with possibly all data captured
+ so far sent to the screen. This should be configurable, though,
+ because in automated test runs it is better to crash than
+ hang indefinitely.
+ """
+ def read(self, *args):
+ raise IOError("reading from stdin while output is captured")
+ readline = read
+ readlines = read
+ __iter__ = read
+
+ def fileno(self):
+ raise ValueError("redirected Stdin is pseudofile, has no fileno()")
+ def isatty(self):
+ return False
+ def close(self):
+ pass
+
+try:
+ devnullpath = os.devnull
+except AttributeError:
+ if os.name == 'nt':
+ devnullpath = 'NUL'
+ else:
+ devnullpath = '/dev/null'
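A short sketch of how the two capture flavours above are driven, assuming they are exported as py.io.StdCapture and py.io.FDCapture (the upstream layout); illustrative only::

    import os
    import py

    # In-memory capture: only the sys.stdout/stderr attributes are swapped.
    cap = py.io.StdCapture()
    print("hello")
    out, err = cap.reset()              # restores sys.stdout/stderr
    assert out == "hello\n"

    # OS-level capture: file descriptor 1 is redirected into a temp file.
    fdcap = py.io.FDCapture(1)
    os.write(1, b"raw bytes\n")
    tmp = fdcap.done()                  # restores fd 1 and returns the temp file
    assert tmp.read() == "raw bytes\n"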
diff --git a/lib/spack/external/py/_io/saferepr.py b/lib/spack/external/py/_io/saferepr.py
new file mode 100644
index 0000000000..8518290efd
--- /dev/null
+++ b/lib/spack/external/py/_io/saferepr.py
@@ -0,0 +1,71 @@
+import py
+import sys
+
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+class SafeRepr(reprlib.Repr):
+ """ subclass of repr.Repr that limits the resulting size of repr()
+ and includes information on exceptions raised during the call.
+ """
+ def repr(self, x):
+ return self._callhelper(reprlib.Repr.repr, self, x)
+
+ def repr_unicode(self, x, level):
+ # Strictly speaking wrong on narrow builds
+ def repr(u):
+ if "'" not in u:
+ return py.builtin._totext("'%s'") % u
+ elif '"' not in u:
+ return py.builtin._totext('"%s"') % u
+ else:
+ return py.builtin._totext("'%s'") % u.replace("'", r"\'")
+ s = repr(x[:self.maxstring])
+ if len(s) > self.maxstring:
+ i = max(0, (self.maxstring-3)//2)
+ j = max(0, self.maxstring-3-i)
+ s = repr(x[:i] + x[len(x)-j:])
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+ def repr_instance(self, x, level):
+ return self._callhelper(builtin_repr, x)
+
+ def _callhelper(self, call, x, *args):
+ try:
+ # Try the vanilla repr and make sure that the result is a string
+ s = call(x, *args)
+ except py.builtin._sysex:
+ raise
+ except:
+ cls, e, tb = sys.exc_info()
+ exc_name = getattr(cls, '__name__', 'unknown')
+ try:
+ exc_info = str(e)
+ except py.builtin._sysex:
+ raise
+ except:
+ exc_info = 'unknown'
+ return '<[%s("%s") raised in repr()] %s object at 0x%x>' % (
+ exc_name, exc_info, x.__class__.__name__, id(x))
+ else:
+ if len(s) > self.maxsize:
+ i = max(0, (self.maxsize-3)//2)
+ j = max(0, self.maxsize-3-i)
+ s = s[:i] + '...' + s[len(s)-j:]
+ return s
+
+def saferepr(obj, maxsize=240):
+ """ return a size-limited safe repr-string for the given object.
+ Failing __repr__ functions of user instances will be represented
+ with a short exception info, and 'saferepr' generally takes
+ care to never raise exceptions itself. This function is a wrapper
+ around the Repr/reprlib functionality of the standard library.
+ """
+ # review exception handling
+ srepr = SafeRepr()
+ srepr.maxstring = maxsize
+ srepr.maxsize = maxsize
+ srepr.maxother = 160
+ return srepr.repr(obj)
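The point of SafeRepr is easiest to see with an object whose __repr__ raises; a minimal sketch, assuming the function is exported as py.io.saferepr (the upstream name)::

    import py

    class Broken(object):
        def __repr__(self):
            raise RuntimeError("boom")

    print(py.io.saferepr(Broken()))
    # -> <[RuntimeError("boom") raised in repr()] Broken object at 0x...>
    print(py.io.saferepr("x" * 1000))   # truncated to roughly 240 characters with '...'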
diff --git a/lib/spack/external/py/_io/terminalwriter.py b/lib/spack/external/py/_io/terminalwriter.py
new file mode 100644
index 0000000000..390e8ca7b9
--- /dev/null
+++ b/lib/spack/external/py/_io/terminalwriter.py
@@ -0,0 +1,357 @@
+"""
+
+Helper functions for writing to terminals and files.
+
+"""
+
+
+import sys, os
+import py
+py3k = sys.version_info[0] >= 3
+from py.builtin import text, bytes
+
+win32_and_ctypes = False
+colorama = None
+if sys.platform == "win32":
+ try:
+ import colorama
+ except ImportError:
+ try:
+ import ctypes
+ win32_and_ctypes = True
+ except ImportError:
+ pass
+
+
+def _getdimensions():
+ import termios,fcntl,struct
+ call = fcntl.ioctl(1,termios.TIOCGWINSZ,"\000"*8)
+ height,width = struct.unpack( "hhhh", call ) [:2]
+ return height, width
+
+
+def get_terminal_width():
+ height = width = 0
+ try:
+ height, width = _getdimensions()
+ except py.builtin._sysex:
+ raise
+ except:
+ # pass to fallback below
+ pass
+
+ if width == 0:
+ # FALLBACK:
+ # * some exception happened
+ # * or this is an emacs terminal which reports (0,0)
+ width = int(os.environ.get('COLUMNS', 80))
+
+ # XXX the windows getdimensions may be bogus, let's sanity-check it a bit
+ if width < 40:
+ width = 80
+ return width
+
+terminal_width = get_terminal_width()
+
+# XXX unify with _escaped func below
+def ansi_print(text, esc, file=None, newline=True, flush=False):
+ if file is None:
+ file = sys.stderr
+ text = text.rstrip()
+ if esc and not isinstance(esc, tuple):
+ esc = (esc,)
+ if esc and sys.platform != "win32" and file.isatty():
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +
+ '\x1b[0m') # ANSI color code "reset"
+ if newline:
+ text += '\n'
+
+ if esc and win32_and_ctypes and file.isatty():
+ if 1 in esc:
+ bold = True
+ esc = tuple([x for x in esc if x != 1])
+ else:
+ bold = False
+ esctable = {() : FOREGROUND_WHITE, # normal
+ (31,): FOREGROUND_RED, # red
+ (32,): FOREGROUND_GREEN, # green
+ (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow
+ (34,): FOREGROUND_BLUE, # blue
+ (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple
+ (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan
+ (37,): FOREGROUND_WHITE, # white
+ (39,): FOREGROUND_WHITE, # reset
+ }
+ attr = esctable.get(esc, FOREGROUND_WHITE)
+ if bold:
+ attr |= FOREGROUND_INTENSITY
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ if file is sys.stderr:
+ handle = GetStdHandle(STD_ERROR_HANDLE)
+ else:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ attr |= (oldcolors & 0x0f0)
+ SetConsoleTextAttribute(handle, attr)
+ while len(text) > 32768:
+ file.write(text[:32768])
+ text = text[32768:]
+ if text:
+ file.write(text)
+ SetConsoleTextAttribute(handle, oldcolors)
+ else:
+ file.write(text)
+
+ if flush:
+ file.flush()
+
+def should_do_markup(file):
+ if os.environ.get('PY_COLORS') == '1':
+ return True
+ if os.environ.get('PY_COLORS') == '0':
+ return False
+ return hasattr(file, 'isatty') and file.isatty() \
+ and os.environ.get('TERM') != 'dumb' \
+ and not (sys.platform.startswith('java') and os._name == 'nt')
+
+class TerminalWriter(object):
+ _esctable = dict(black=30, red=31, green=32, yellow=33,
+ blue=34, purple=35, cyan=36, white=37,
+ Black=40, Red=41, Green=42, Yellow=43,
+ Blue=44, Purple=45, Cyan=46, White=47,
+ bold=1, light=2, blink=5, invert=7)
+
+ # XXX deprecate stringio argument
+ def __init__(self, file=None, stringio=False, encoding=None):
+ if file is None:
+ if stringio:
+ self.stringio = file = py.io.TextIO()
+ else:
+ file = py.std.sys.stdout
+ elif py.builtin.callable(file) and not (
+ hasattr(file, "write") and hasattr(file, "flush")):
+ file = WriteFile(file, encoding=encoding)
+ if hasattr(file, "isatty") and file.isatty() and colorama:
+ file = colorama.AnsiToWin32(file).stream
+ self.encoding = encoding or getattr(file, 'encoding', "utf-8")
+ self._file = file
+ self.hasmarkup = should_do_markup(file)
+ self._lastlen = 0
+
+ @property
+ def fullwidth(self):
+ if hasattr(self, '_terminal_width'):
+ return self._terminal_width
+ return get_terminal_width()
+
+ @fullwidth.setter
+ def fullwidth(self, value):
+ self._terminal_width = value
+
+ def _escaped(self, text, esc):
+ if esc and self.hasmarkup:
+ text = (''.join(['\x1b[%sm' % cod for cod in esc]) +
+ text +'\x1b[0m')
+ return text
+
+ def markup(self, text, **kw):
+ esc = []
+ for name in kw:
+ if name not in self._esctable:
+ raise ValueError("unknown markup: %r" %(name,))
+ if kw[name]:
+ esc.append(self._esctable[name])
+ return self._escaped(text, tuple(esc))
+
+ def sep(self, sepchar, title=None, fullwidth=None, **kw):
+ if fullwidth is None:
+ fullwidth = self.fullwidth
+ # the goal is to have the line be as long as possible
+ # under the condition that len(line) <= fullwidth
+ if sys.platform == "win32":
+ # if we print in the last column on windows we are on a
+ # new line but there is no way to verify/neutralize this
+ # (we may not know the exact line width)
+ # so let's be defensive to avoid empty lines in the output
+ fullwidth -= 1
+ if title is not None:
+ # we want 2 + 2*len(fill) + len(title) <= fullwidth
+ # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth
+ # 2*len(sepchar)*N <= fullwidth - len(title) - 2
+ # N <= (fullwidth - len(title) - 2) // (2*len(sepchar))
+ N = (fullwidth - len(title) - 2) // (2*len(sepchar))
+ fill = sepchar * N
+ line = "%s %s %s" % (fill, title, fill)
+ else:
+ # we want len(sepchar)*N <= fullwidth
+ # i.e. N <= fullwidth // len(sepchar)
+ line = sepchar * (fullwidth // len(sepchar))
+ # in some situations there is room for an extra sepchar at the right,
+ # in particular if we consider that with a sepchar like "_ " the
+ # trailing space is not important at the end of the line
+ if len(line) + len(sepchar.rstrip()) <= fullwidth:
+ line += sepchar.rstrip()
+
+ self.line(line, **kw)
+
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+ if self.hasmarkup and kw:
+ markupmsg = self.markup(msg, **kw)
+ else:
+ markupmsg = msg
+ write_out(self._file, markupmsg)
+
+ def line(self, s='', **kw):
+ self.write(s, **kw)
+ self._checkfill(s)
+ self.write('\n')
+
+ def reline(self, line, **kw):
+ if not self.hasmarkup:
+ raise ValueError("cannot use rewrite-line without terminal")
+ self.write(line, **kw)
+ self._checkfill(line)
+ self.write('\r')
+ self._lastlen = len(line)
+
+ def _checkfill(self, line):
+ diff2last = self._lastlen - len(line)
+ if diff2last > 0:
+ self.write(" " * diff2last)
+
+class Win32ConsoleWriter(TerminalWriter):
+ def write(self, msg, **kw):
+ if msg:
+ if not isinstance(msg, (bytes, text)):
+ msg = text(msg)
+ oldcolors = None
+ if self.hasmarkup and kw:
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ oldcolors = GetConsoleInfo(handle).wAttributes
+ default_bg = oldcolors & 0x00F0
+ attr = default_bg
+ if kw.pop('bold', False):
+ attr |= FOREGROUND_INTENSITY
+
+ if kw.pop('red', False):
+ attr |= FOREGROUND_RED
+ elif kw.pop('blue', False):
+ attr |= FOREGROUND_BLUE
+ elif kw.pop('green', False):
+ attr |= FOREGROUND_GREEN
+ elif kw.pop('yellow', False):
+ attr |= FOREGROUND_GREEN|FOREGROUND_RED
+ else:
+ attr |= oldcolors & 0x0007
+
+ SetConsoleTextAttribute(handle, attr)
+ write_out(self._file, msg)
+ if oldcolors:
+ SetConsoleTextAttribute(handle, oldcolors)
+
+class WriteFile(object):
+ def __init__(self, writemethod, encoding=None):
+ self.encoding = encoding
+ self._writemethod = writemethod
+
+ def write(self, data):
+ if self.encoding:
+ data = data.encode(self.encoding, "replace")
+ self._writemethod(data)
+
+ def flush(self):
+ return
+
+
+if win32_and_ctypes:
+ TerminalWriter = Win32ConsoleWriter
+ import ctypes
+ from ctypes import wintypes
+
+ # ctypes access to the Windows console
+ STD_OUTPUT_HANDLE = -11
+ STD_ERROR_HANDLE = -12
+ FOREGROUND_BLACK = 0x0000 # black text
+ FOREGROUND_BLUE = 0x0001 # text color contains blue.
+ FOREGROUND_GREEN = 0x0002 # text color contains green.
+ FOREGROUND_RED = 0x0004 # text color contains red.
+ FOREGROUND_WHITE = 0x0007
+ FOREGROUND_INTENSITY = 0x0008 # text color is intensified.
+ BACKGROUND_BLACK = 0x0000 # background color black
+ BACKGROUND_BLUE = 0x0010 # background color contains blue.
+ BACKGROUND_GREEN = 0x0020 # background color contains green.
+ BACKGROUND_RED = 0x0040 # background color contains red.
+ BACKGROUND_WHITE = 0x0070
+ BACKGROUND_INTENSITY = 0x0080 # background color is intensified.
+
+ SHORT = ctypes.c_short
+ class COORD(ctypes.Structure):
+ _fields_ = [('X', SHORT),
+ ('Y', SHORT)]
+ class SMALL_RECT(ctypes.Structure):
+ _fields_ = [('Left', SHORT),
+ ('Top', SHORT),
+ ('Right', SHORT),
+ ('Bottom', SHORT)]
+ class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure):
+ _fields_ = [('dwSize', COORD),
+ ('dwCursorPosition', COORD),
+ ('wAttributes', wintypes.WORD),
+ ('srWindow', SMALL_RECT),
+ ('dwMaximumWindowSize', COORD)]
+
+ _GetStdHandle = ctypes.windll.kernel32.GetStdHandle
+ _GetStdHandle.argtypes = [wintypes.DWORD]
+ _GetStdHandle.restype = wintypes.HANDLE
+ def GetStdHandle(kind):
+ return _GetStdHandle(kind)
+
+ SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute
+ SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD]
+ SetConsoleTextAttribute.restype = wintypes.BOOL
+
+ _GetConsoleScreenBufferInfo = \
+ ctypes.windll.kernel32.GetConsoleScreenBufferInfo
+ _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE,
+ ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
+ _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+ def GetConsoleInfo(handle):
+ info = CONSOLE_SCREEN_BUFFER_INFO()
+ _GetConsoleScreenBufferInfo(handle, ctypes.byref(info))
+ return info
+
+ def _getdimensions():
+ handle = GetStdHandle(STD_OUTPUT_HANDLE)
+ info = GetConsoleInfo(handle)
+ # Subtract one from the width, otherwise the cursor wraps
+ # and the ending \n causes an empty line to display.
+ return info.dwSize.Y, info.dwSize.X - 1
+
+def write_out(fil, msg):
+ # XXX sometimes "msg" is of type bytes, sometimes text which
+ # complicates the situation. Should we try to enforce unicode?
+ try:
+ # on py27 and above writing out to sys.stdout with an encoding
+ # should usually work for unicode messages (if the encoding is
+ # capable of it)
+ fil.write(msg)
+ except UnicodeEncodeError:
+ # on py26 it might not work because stdout expects bytes
+ if fil.encoding:
+ try:
+ fil.write(msg.encode(fil.encoding))
+ except UnicodeEncodeError:
+ # it might still fail if the encoding is not capable
+ pass
+ else:
+ fil.flush()
+ return
+ # fallback: escape all unicode characters
+ msg = msg.encode("unicode-escape").decode("ascii")
+ fil.write(msg)
+ fil.flush()
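A small usage sketch for the writer above, assuming it is exported as py.io.TerminalWriter (the upstream name). With stringio=True output is collected in an in-memory buffer; markup keywords only become ANSI escape codes when the target looks like a capable terminal (see should_do_markup and the PY_COLORS override)::

    import py

    tw = py.io.TerminalWriter(stringio=True)   # collect output in tw.stringio
    tw.sep("=", "results")                     # full-width separator with a centered title
    tw.line("all good", green=True)            # markup is skipped here: a StringIO is not a tty
    print(tw.stringio.getvalue())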
diff --git a/lib/spack/external/py/_log/__init__.py b/lib/spack/external/py/_log/__init__.py
new file mode 100644
index 0000000000..fad62e960d
--- /dev/null
+++ b/lib/spack/external/py/_log/__init__.py
@@ -0,0 +1,2 @@
+""" logging API ('producers' and 'consumers' connected via keywords) """
+
diff --git a/lib/spack/external/py/_log/log.py b/lib/spack/external/py/_log/log.py
new file mode 100644
index 0000000000..ce47e8c754
--- /dev/null
+++ b/lib/spack/external/py/_log/log.py
@@ -0,0 +1,186 @@
+"""
+basic logging functionality based on a producer/consumer scheme.
+
+XXX implement this API: (maybe put it into slogger.py?)
+
+ log = Logger(
+ info=py.log.STDOUT,
+ debug=py.log.STDOUT,
+ command=None)
+ log.info("hello", "world")
+ log.command("hello", "world")
+
+ log = Logger(info=Logger(something=...),
+ debug=py.log.STDOUT,
+ command=None)
+"""
+import py, sys
+
+class Message(object):
+ def __init__(self, keywords, args):
+ self.keywords = keywords
+ self.args = args
+
+ def content(self):
+ return " ".join(map(str, self.args))
+
+ def prefix(self):
+ return "[%s] " % (":".join(self.keywords))
+
+ def __str__(self):
+ return self.prefix() + self.content()
+
+
+class Producer(object):
+ """ (deprecated) Log producer API which sends messages to be logged
+ to a 'consumer' object, which then prints them to stdout,
+ stderr, files, etc. Used extensively by PyPy-1.1.
+ """
+
+ Message = Message # to allow later customization
+ keywords2consumer = {}
+
+ def __init__(self, keywords, keywordmapper=None, **kw):
+ if hasattr(keywords, 'split'):
+ keywords = tuple(keywords.split())
+ self._keywords = keywords
+ if keywordmapper is None:
+ keywordmapper = default_keywordmapper
+ self._keywordmapper = keywordmapper
+
+ def __repr__(self):
+ return "<py.log.Producer %s>" % ":".join(self._keywords)
+
+ def __getattr__(self, name):
+ if '_' in name:
+ raise AttributeError(name)
+ producer = self.__class__(self._keywords + (name,))
+ setattr(self, name, producer)
+ return producer
+
+ def __call__(self, *args):
+ """ write a message to the appropriate consumer(s) """
+ func = self._keywordmapper.getconsumer(self._keywords)
+ if func is not None:
+ func(self.Message(self._keywords, args))
+
+class KeywordMapper:
+ def __init__(self):
+ self.keywords2consumer = {}
+
+ def getstate(self):
+ return self.keywords2consumer.copy()
+ def setstate(self, state):
+ self.keywords2consumer.clear()
+ self.keywords2consumer.update(state)
+
+ def getconsumer(self, keywords):
+ """ return a consumer matching the given keywords.
+
+ tries to find the most suitable consumer by trying keyword
+ prefixes from the longest to the shortest; the first consumer
+ found is returned (falling back to the 'default' consumer).
+ """
+ for i in range(len(keywords), 0, -1):
+ try:
+ return self.keywords2consumer[keywords[:i]]
+ except KeyError:
+ continue
+ return self.keywords2consumer.get('default', default_consumer)
+
+ def setconsumer(self, keywords, consumer):
+ """ set a consumer for a set of keywords. """
+ # normalize to tuples
+ if isinstance(keywords, str):
+ keywords = tuple(filter(None, keywords.split()))
+ elif hasattr(keywords, '_keywords'):
+ keywords = keywords._keywords
+ elif not isinstance(keywords, tuple):
+ raise TypeError("key %r is not a string or tuple" % (keywords,))
+ if consumer is not None and not py.builtin.callable(consumer):
+ if not hasattr(consumer, 'write'):
+ raise TypeError(
+ "%r should be None, callable or file-like" % (consumer,))
+ consumer = File(consumer)
+ self.keywords2consumer[keywords] = consumer
+
+def default_consumer(msg):
+ """ the default consumer, prints the message to stdout (using 'print') """
+ sys.stderr.write(str(msg)+"\n")
+
+default_keywordmapper = KeywordMapper()
+
+def setconsumer(keywords, consumer):
+ default_keywordmapper.setconsumer(keywords, consumer)
+
+def setstate(state):
+ default_keywordmapper.setstate(state)
+def getstate():
+ return default_keywordmapper.getstate()
+
+#
+# Consumers
+#
+
+class File(object):
+ """ log consumer wrapping a file(-like) object """
+ def __init__(self, f):
+ assert hasattr(f, 'write')
+ #assert isinstance(f, file) or not hasattr(f, 'open')
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ self._file.write(str(msg) + "\n")
+ if hasattr(self._file, 'flush'):
+ self._file.flush()
+
+class Path(object):
+ """ log consumer that opens and writes to a Path """
+ def __init__(self, filename, append=False,
+ delayed_create=False, buffering=False):
+ self._append = append
+ self._filename = str(filename)
+ self._buffering = buffering
+ if not delayed_create:
+ self._openfile()
+
+ def _openfile(self):
+ mode = self._append and 'a' or 'w'
+ f = open(self._filename, mode)
+ self._file = f
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ if not hasattr(self, "_file"):
+ self._openfile()
+ self._file.write(str(msg) + "\n")
+ if not self._buffering:
+ self._file.flush()
+
+def STDOUT(msg):
+ """ consumer that writes to sys.stdout """
+ sys.stdout.write(str(msg)+"\n")
+
+def STDERR(msg):
+ """ consumer that writes to sys.stderr """
+ sys.stderr.write(str(msg)+"\n")
+
+class Syslog:
+ """ consumer that writes to the syslog daemon """
+
+ def __init__(self, priority = None):
+ if priority is None:
+ priority = self.LOG_INFO
+ self.priority = priority
+
+ def __call__(self, msg):
+ """ write a message to the log """
+ py.std.syslog.syslog(self.priority, str(msg))
+
+for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split():
+ _prio = "LOG_" + _prio
+ try:
+ setattr(Syslog, _prio, getattr(py.std.syslog, _prio))
+ except AttributeError:
+ pass
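A brief sketch of the producer/consumer wiring implemented above, assuming the upstream package exports py.log.Producer, py.log.setconsumer and py.log.STDOUT::

    import py

    log = py.log.Producer("myapp")
    py.log.setconsumer("myapp", py.log.STDOUT)   # everything under 'myapp' goes to stdout
    py.log.setconsumer("myapp debug", None)      # ...except 'myapp debug', which is silenced

    log("starting up")          # prints "[myapp] starting up"
    log.debug("not shown")      # consumer is None, message dropped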
diff --git a/lib/spack/external/py/_log/warning.py b/lib/spack/external/py/_log/warning.py
new file mode 100644
index 0000000000..722e31e910
--- /dev/null
+++ b/lib/spack/external/py/_log/warning.py
@@ -0,0 +1,76 @@
+import py, sys
+
+class DeprecationWarning(DeprecationWarning):
+ def __init__(self, msg, path, lineno):
+ self.msg = msg
+ self.path = path
+ self.lineno = lineno
+ def __repr__(self):
+ return "%s:%d: %s" %(self.path, self.lineno+1, self.msg)
+ def __str__(self):
+ return self.msg
+
+def _apiwarn(startversion, msg, stacklevel=2, function=None):
+ # below is mostly COPIED from python2.4/warnings.py's def warn()
+ # Get context information
+ if isinstance(stacklevel, str):
+ frame = sys._getframe(1)
+ level = 1
+ found = frame.f_code.co_filename.find(stacklevel) != -1
+ while frame:
+ co = frame.f_code
+ if co.co_filename.find(stacklevel) == -1:
+ if found:
+ stacklevel = level
+ break
+ else:
+ found = True
+ level += 1
+ frame = frame.f_back
+ else:
+ stacklevel = 1
+ msg = "%s (since version %s)" %(msg, startversion)
+ warn(msg, stacklevel=stacklevel+1, function=function)
+
+def warn(msg, stacklevel=1, function=None):
+ if function is not None:
+ filename = py.std.inspect.getfile(function)
+ lineno = py.code.getrawcode(function).co_firstlineno
+ else:
+ try:
+ caller = sys._getframe(stacklevel)
+ except ValueError:
+ globals = sys.__dict__
+ lineno = 1
+ else:
+ globals = caller.f_globals
+ lineno = caller.f_lineno
+ if '__name__' in globals:
+ module = globals['__name__']
+ else:
+ module = "<string>"
+ filename = globals.get('__file__')
+ if filename:
+ fnl = filename.lower()
+ if fnl.endswith(".pyc") or fnl.endswith(".pyo"):
+ filename = filename[:-1]
+ elif fnl.endswith("$py.class"):
+ filename = filename.replace('$py.class', '.py')
+ else:
+ if module == "__main__":
+ try:
+ filename = sys.argv[0]
+ except AttributeError:
+ # embedded interpreters don't have sys.argv, see bug #839151
+ filename = '__main__'
+ if not filename:
+ filename = module
+ path = py.path.local(filename)
+ warning = DeprecationWarning(msg, path, lineno)
+ py.std.warnings.warn_explicit(warning, category=Warning,
+ filename=str(warning.path),
+ lineno=warning.lineno,
+ registry=py.std.warnings.__dict__.setdefault(
+ "__warningsregistry__", {})
+ )
+
diff --git a/lib/spack/external/py/_path/__init__.py b/lib/spack/external/py/_path/__init__.py
new file mode 100644
index 0000000000..51f3246f80
--- /dev/null
+++ b/lib/spack/external/py/_path/__init__.py
@@ -0,0 +1 @@
+""" unified file system api """
diff --git a/lib/spack/external/py/_path/cacheutil.py b/lib/spack/external/py/_path/cacheutil.py
new file mode 100644
index 0000000000..9922504750
--- /dev/null
+++ b/lib/spack/external/py/_path/cacheutil.py
@@ -0,0 +1,114 @@
+"""
+This module contains multithread-safe cache implementations.
+
+All Caches have
+
+ getorbuild(key, builder)
+ delentry(key)
+
+methods and allow configuration when instantiating the cache class.
+"""
+from time import time as gettime
+
+class BasicCache(object):
+ def __init__(self, maxentries=128):
+ self.maxentries = maxentries
+ self.prunenum = int(maxentries - maxentries/8)
+ self._dict = {}
+
+ def clear(self):
+ self._dict.clear()
+
+ def _getentry(self, key):
+ return self._dict[key]
+
+ def _putentry(self, key, entry):
+ self._prunelowestweight()
+ self._dict[key] = entry
+
+ def delentry(self, key, raising=False):
+ try:
+ del self._dict[key]
+ except KeyError:
+ if raising:
+ raise
+
+ def getorbuild(self, key, builder):
+ try:
+ entry = self._getentry(key)
+ except KeyError:
+ entry = self._build(key, builder)
+ self._putentry(key, entry)
+ return entry.value
+
+ def _prunelowestweight(self):
+ """ prune out entries with lowest weight. """
+ numentries = len(self._dict)
+ if numentries >= self.maxentries:
+ # evict according to entry's weight
+ items = [(entry.weight, key)
+ for key, entry in self._dict.items()]
+ items.sort()
+ index = numentries - self.prunenum
+ if index > 0:
+ for weight, key in items[:index]:
+ # in MT situations the element might be gone
+ self.delentry(key, raising=False)
+
+class BuildcostAccessCache(BasicCache):
+ """ A BuildTime/Access-counting cache implementation.
+ The weight of a value is computed as the product of
+
+ num-accesses-of-a-value * time-to-build-the-value
+
+ The values with the least such weights are evicted
+ if the cache's maxentries threshold is exceeded.
+ For implementation flexibility more than one object
+ might be evicted at a time.
+ """
+ # time function to use for measuring build-times
+
+ def _build(self, key, builder):
+ start = gettime()
+ val = builder()
+ end = gettime()
+ return WeightedCountingEntry(val, end-start)
+
+
+class WeightedCountingEntry(object):
+ def __init__(self, value, oneweight):
+ self._value = value
+ self.weight = self._oneweight = oneweight
+
+ def value(self):
+ self.weight += self._oneweight
+ return self._value
+ value = property(value)
+
+class AgingCache(BasicCache):
+ """ This cache prunes out cache entries that are too old.
+ """
+ def __init__(self, maxentries=128, maxseconds=10.0):
+ super(AgingCache, self).__init__(maxentries)
+ self.maxseconds = maxseconds
+
+ def _getentry(self, key):
+ entry = self._dict[key]
+ if entry.isexpired():
+ self.delentry(key)
+ raise KeyError(key)
+ return entry
+
+ def _build(self, key, builder):
+ val = builder()
+ entry = AgingEntry(val, gettime() + self.maxseconds)
+ return entry
+
+class AgingEntry(object):
+ def __init__(self, value, expirationtime):
+ self.value = value
+ self.weight = expirationtime
+
+ def isexpired(self):
+ t = gettime()
+ return t >= self.weight
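A minimal sketch of the getorbuild protocol shared by these caches, importing directly from the vendored module::

    from py._path.cacheutil import AgingCache, BuildcostAccessCache

    cache = BuildcostAccessCache(maxentries=64)
    value = cache.getorbuild("answer", lambda: 40 + 2)   # builder runs on the first access
    again = cache.getorbuild("answer", lambda: 0)        # cached entry reused, builder ignored
    assert value == again == 42

    short_lived = AgingCache(maxentries=64, maxseconds=0.5)  # entries expire after ~0.5s
    short_lived.getorbuild("token", lambda: "abc")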
diff --git a/lib/spack/external/py/_path/common.py b/lib/spack/external/py/_path/common.py
new file mode 100644
index 0000000000..bf42ed5092
--- /dev/null
+++ b/lib/spack/external/py/_path/common.py
@@ -0,0 +1,439 @@
+"""
+"""
+import os, sys, posixpath
+import py
+
+# Moved from local.py.
+iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt')
+
+try:
+ from os import fspath
+except ImportError:
+ def fspath(path):
+ """
+ Return the string representation of the path.
+ If str or bytes is passed in, it is returned unchanged.
+ This code comes from PEP 519, modified to support earlier versions of
+ python.
+
+ This is required for python < 3.6.
+ """
+ if isinstance(path, (py.builtin.text, py.builtin.bytes)):
+ return path
+
+ # Work from the object's type to match method resolution of other magic
+ # methods.
+ path_type = type(path)
+ try:
+ return path_type.__fspath__(path)
+ except AttributeError:
+ if hasattr(path_type, '__fspath__'):
+ raise
+ try:
+ import pathlib
+ except ImportError:
+ pass
+ else:
+ if isinstance(path, pathlib.PurePath):
+ return py.builtin.text(path)
+
+ raise TypeError("expected str, bytes or os.PathLike object, not "
+ + path_type.__name__)
+
+class Checkers:
+ _depend_on_existence = 'exists', 'link', 'dir', 'file'
+
+ def __init__(self, path):
+ self.path = path
+
+ def dir(self):
+ raise NotImplementedError
+
+ def file(self):
+ raise NotImplementedError
+
+ def dotfile(self):
+ return self.path.basename.startswith('.')
+
+ def ext(self, arg):
+ if not arg.startswith('.'):
+ arg = '.' + arg
+ return self.path.ext == arg
+
+ def exists(self):
+ raise NotImplementedError
+
+ def basename(self, arg):
+ return self.path.basename == arg
+
+ def basestarts(self, arg):
+ return self.path.basename.startswith(arg)
+
+ def relto(self, arg):
+ return self.path.relto(arg)
+
+ def fnmatch(self, arg):
+ return self.path.fnmatch(arg)
+
+ def endswith(self, arg):
+ return str(self.path).endswith(arg)
+
+ def _evaluate(self, kw):
+ for name, value in kw.items():
+ invert = False
+ meth = None
+ try:
+ meth = getattr(self, name)
+ except AttributeError:
+ if name[:3] == 'not':
+ invert = True
+ try:
+ meth = getattr(self, name[3:])
+ except AttributeError:
+ pass
+ if meth is None:
+ raise TypeError(
+ "no %r checker available for %r" % (name, self.path))
+ try:
+ if py.code.getrawcode(meth).co_argcount > 1:
+ if (not meth(value)) ^ invert:
+ return False
+ else:
+ if bool(value) ^ bool(meth()) ^ invert:
+ return False
+ except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY):
+ # EBUSY feels not entirely correct,
+ # but it's kind of necessary since ENOMEDIUM
+ # is not accessible in python
+ for name in self._depend_on_existence:
+ if name in kw:
+ if kw.get(name):
+ return False
+ name = 'not' + name
+ if name in kw:
+ if not kw.get(name):
+ return False
+ return True
+
+class NeverRaised(Exception):
+ pass
+
+class PathBase(object):
+ """ shared implementation for filesystem path objects."""
+ Checkers = Checkers
+
+ def __div__(self, other):
+ return self.join(fspath(other))
+ __truediv__ = __div__ # py3k
+
+ def basename(self):
+ """ basename part of path. """
+ return self._getbyspec('basename')[0]
+ basename = property(basename, None, None, basename.__doc__)
+
+ def dirname(self):
+ """ dirname part of path. """
+ return self._getbyspec('dirname')[0]
+ dirname = property(dirname, None, None, dirname.__doc__)
+
+ def purebasename(self):
+ """ pure base name of the path."""
+ return self._getbyspec('purebasename')[0]
+ purebasename = property(purebasename, None, None, purebasename.__doc__)
+
+ def ext(self):
+ """ extension of the path (including the '.')."""
+ return self._getbyspec('ext')[0]
+ ext = property(ext, None, None, ext.__doc__)
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ return self.new(basename='').join(*args, **kwargs)
+
+ def read_binary(self):
+ """ read and return a bytestring from reading the path. """
+ with self.open('rb') as f:
+ return f.read()
+
+ def read_text(self, encoding):
+ """ read and return a Unicode string from reading the path. """
+ with self.open("r", encoding=encoding) as f:
+ return f.read()
+
+
+ def read(self, mode='r'):
+ """ read and return a bytestring from reading the path. """
+ with self.open(mode) as f:
+ return f.read()
+
+ def readlines(self, cr=1):
+ """ read and return a list of lines from the path. if cr is False, the
+newline will be removed from the end of each line. """
+ if not cr:
+ content = self.read('rU')
+ return content.split('\n')
+ else:
+ f = self.open('rU')
+ try:
+ return f.readlines()
+ finally:
+ f.close()
+
+ def load(self):
+ """ (deprecated) return object unpickled from self.read() """
+ f = self.open('rb')
+ try:
+ return py.error.checked_call(py.std.pickle.load, f)
+ finally:
+ f.close()
+
+ def move(self, target):
+ """ move this path to target. """
+ if target.relto(self):
+ raise py.error.EINVAL(target,
+ "cannot move path into a subdirectory of itself")
+ try:
+ self.rename(target)
+ except py.error.EXDEV: # invalid cross-device link
+ self.copy(target)
+ self.remove()
+
+ def __repr__(self):
+ """ return a string representation of this path. """
+ return repr(str(self))
+
+ def check(self, **kw):
+ """ check a path for existence and properties.
+
+ Without arguments, return True if the path exists, otherwise False.
+
+ valid checkers::
+
+ file=1 # is a file
+ file=0 # is not a file (may not even exist)
+ dir=1 # is a dir
+ link=1 # is a link
+ exists=1 # exists
+
+ You can specify multiple checker definitions, for example::
+
+ path.check(file=1, link=1) # a link pointing to a file
+ """
+ if not kw:
+ kw = {'exists' : 1}
+ return self.Checkers(self)._evaluate(kw)
+
+ def fnmatch(self, pattern):
+ """return true if the basename/fullname matches the glob-'pattern'.
+
+ valid pattern characters::
+
+ * matches everything
+ ? matches any single character
+ [seq] matches any character in seq
+ [!seq] matches any char not in seq
+
+ If the pattern contains a path-separator then the full path
+ is used for pattern matching and a '*' is prepended to the
+ pattern.
+
+ if the pattern doesn't contain a path-separator the pattern
+ is only matched against the basename.
+ """
+ return FNMatcher(pattern)(self)
+
+ def relto(self, relpath):
+ """ return a string which is the relative part of the path
+ to the given 'relpath'.
+ """
+ if not isinstance(relpath, (str, PathBase)):
+ raise TypeError("%r: not a string or path object" %(relpath,))
+ strrelpath = str(relpath)
+ if strrelpath and strrelpath[-1] != self.sep:
+ strrelpath += self.sep
+ #assert strrelpath[-1] == self.sep
+ #assert strrelpath[-2] != self.sep
+ strself = self.strpath
+ if sys.platform == "win32" or getattr(os, '_name', None) == 'nt':
+ if os.path.normcase(strself).startswith(
+ os.path.normcase(strrelpath)):
+ return strself[len(strrelpath):]
+ elif strself.startswith(strrelpath):
+ return strself[len(strrelpath):]
+ return ""
+
+ def ensure_dir(self, *args):
+ """ ensure the path joined with args is a directory. """
+ return self.ensure(*args, **{"dir": True})
+
+ def bestrelpath(self, dest):
+ """ return a string which is a relative path from self
+ (assumed to be a directory) to dest such that
+ self.join(bestrelpath) == dest; if no such
+ path can be determined, return dest.
+ """
+ try:
+ if self == dest:
+ return os.curdir
+ base = self.common(dest)
+ if not base: # can be the case on windows
+ return str(dest)
+ self2base = self.relto(base)
+ reldest = dest.relto(base)
+ if self2base:
+ n = self2base.count(self.sep) + 1
+ else:
+ n = 0
+ l = [os.pardir] * n
+ if reldest:
+ l.append(reldest)
+ target = dest.sep.join(l)
+ return target
+ except AttributeError:
+ return str(dest)
+
+ def exists(self):
+ return self.check()
+
+ def isdir(self):
+ return self.check(dir=1)
+
+ def isfile(self):
+ return self.check(file=1)
+
+ def parts(self, reverse=False):
+ """ return a root-first list of all ancestor directories
+ plus the path itself.
+ """
+ current = self
+ l = [self]
+ while 1:
+ last = current
+ current = current.dirpath()
+ if last == current:
+ break
+ l.append(current)
+ if not reverse:
+ l.reverse()
+ return l
+
+ def common(self, other):
+ """ return the common part shared with the other path
+ or None if there is no common part.
+ """
+ last = None
+ for x, y in zip(self.parts(), other.parts()):
+ if x != y:
+ return last
+ last = x
+ return last
+
+ def __add__(self, other):
+ """ return new path object with 'other' added to the basename"""
+ return self.new(basename=self.basename+str(other))
+
+ def __cmp__(self, other):
+ """ return sort value (-1, 0, +1). """
+ try:
+ return cmp(self.strpath, other.strpath)
+ except AttributeError:
+ return cmp(str(self), str(other)) # self.path, other.path)
+
+ def __lt__(self, other):
+ try:
+ return self.strpath < other.strpath
+ except AttributeError:
+ return str(self) < str(other)
+
+ def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False):
+ """ yields all paths below the current one
+
+ fil is a filter (glob pattern or callable); paths that do not
+ match it will not be yielded. Defaults to None (everything is
+ returned)
+
+ rec is a filter (glob pattern or callable) that controls whether
+ a directory is descended into, defaulting to None
+
+ ignore is an Exception class that is ignored when calling listdir()
+ on any of the paths (by default, all exceptions are reported)
+
+ bf if True will cause a breadth-first search instead of the
+ default depth-first traversal. Default: False
+
+ sort if True will sort entries within each directory level.
+ """
+ for x in Visitor(fil, rec, ignore, bf, sort).gen(self):
+ yield x
+
+ def _sortlist(self, res, sort):
+ if sort:
+ if hasattr(sort, '__call__'):
+ res.sort(sort)
+ else:
+ res.sort()
+
+ def samefile(self, other):
+ """ return True if other refers to the same stat object as self. """
+ return self.strpath == str(other)
+
+ def __fspath__(self):
+ return str(self)
+
+class Visitor:
+ def __init__(self, fil, rec, ignore, bf, sort):
+ if isinstance(fil, py.builtin._basestring):
+ fil = FNMatcher(fil)
+ if isinstance(rec, py.builtin._basestring):
+ self.rec = FNMatcher(rec)
+ elif not hasattr(rec, '__call__') and rec:
+ self.rec = lambda path: True
+ else:
+ self.rec = rec
+ self.fil = fil
+ self.ignore = ignore
+ self.breadthfirst = bf
+ self.optsort = sort and sorted or (lambda x: x)
+
+ def gen(self, path):
+ try:
+ entries = path.listdir()
+ except self.ignore:
+ return
+ rec = self.rec
+ dirs = self.optsort([p for p in entries
+ if p.check(dir=1) and (rec is None or rec(p))])
+ if not self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+ for p in self.optsort(entries):
+ if self.fil is None or self.fil(p):
+ yield p
+ if self.breadthfirst:
+ for subdir in dirs:
+ for p in self.gen(subdir):
+ yield p
+
+class FNMatcher:
+ def __init__(self, pattern):
+ self.pattern = pattern
+
+ def __call__(self, path):
+ pattern = self.pattern
+
+ if (pattern.find(path.sep) == -1 and
+ iswin32 and
+ pattern.find(posixpath.sep) != -1):
+ # Running on Windows, the pattern has no Windows path separators,
+ # and the pattern has one or more Posix path separators. Replace
+ # the Posix path separators with the Windows path separator.
+ pattern = pattern.replace(posixpath.sep, path.sep)
+
+ if pattern.find(path.sep) == -1:
+ name = path.basename
+ else:
+ name = str(path) # path.strpath # XXX svn?
+ if not os.path.isabs(pattern):
+ pattern = '*' + path.sep + pattern
+ return py.std.fnmatch.fnmatch(name, pattern)
diff --git a/lib/spack/external/py/_path/local.py b/lib/spack/external/py/_path/local.py
new file mode 100644
index 0000000000..0d4e4c93d1
--- /dev/null
+++ b/lib/spack/external/py/_path/local.py
@@ -0,0 +1,928 @@
+"""
+local path implementation.
+"""
+from __future__ import with_statement
+
+from contextlib import contextmanager
+import sys, os, re, atexit, io
+import py
+from py._path import common
+from py._path.common import iswin32, fspath
+from stat import S_ISLNK, S_ISDIR, S_ISREG
+
+from os.path import abspath, normpath, isabs, exists, isdir, isfile, islink, dirname
+
+if sys.version_info > (3,0):
+ def map_as_list(func, iter):
+ return list(map(func, iter))
+else:
+ map_as_list = map
+
+class Stat(object):
+ def __getattr__(self, name):
+ return getattr(self._osstatresult, "st_" + name)
+
+ def __init__(self, path, osstatresult):
+ self.path = path
+ self._osstatresult = osstatresult
+
+ @property
+ def owner(self):
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import pwd
+ entry = py.error.checked_call(pwd.getpwuid, self.uid)
+ return entry[0]
+
+ @property
+ def group(self):
+ """ return group name of file. """
+ if iswin32:
+ raise NotImplementedError("XXX win32")
+ import grp
+ entry = py.error.checked_call(grp.getgrgid, self.gid)
+ return entry[0]
+
+ def isdir(self):
+ return S_ISDIR(self._osstatresult.st_mode)
+
+ def isfile(self):
+ return S_ISREG(self._osstatresult.st_mode)
+
+ def islink(self):
+ st = self.path.lstat()
+ return S_ISLNK(self._osstatresult.st_mode)
+
+class PosixPath(common.PathBase):
+ def chown(self, user, group, rec=0):
+ """ change ownership to the given user and group.
+ user and group may be specified by a number or
+ by a name. if rec is True change ownership
+ recursively.
+ """
+ uid = getuserid(user)
+ gid = getgroupid(group)
+ if rec:
+ for x in self.visit(rec=lambda x: x.check(link=0)):
+ if x.check(link=0):
+ py.error.checked_call(os.chown, str(x), uid, gid)
+ py.error.checked_call(os.chown, str(self), uid, gid)
+
+ def readlink(self):
+ """ return value of a symbolic link. """
+ return py.error.checked_call(os.readlink, self.strpath)
+
+ def mklinkto(self, oldname):
+ """ posix style hard link to another name. """
+ py.error.checked_call(os.link, str(oldname), str(self))
+
+ def mksymlinkto(self, value, absolute=1):
+ """ create a symbolic link with the given value (pointing to another name). """
+ if absolute:
+ py.error.checked_call(os.symlink, str(value), self.strpath)
+ else:
+ base = self.common(value)
+ # with posix local paths '/' is always a common base
+ relsource = self.__class__(value).relto(base)
+ reldest = self.relto(base)
+ n = reldest.count(self.sep)
+ target = self.sep.join(('..', )*n + (relsource, ))
+ py.error.checked_call(os.symlink, target, self.strpath)
+
+def getuserid(user):
+ import pwd
+ if not isinstance(user, int):
+ user = pwd.getpwnam(user)[2]
+ return user
+
+def getgroupid(group):
+ import grp
+ if not isinstance(group, int):
+ group = grp.getgrnam(group)[2]
+ return group
+
+FSBase = not iswin32 and PosixPath or common.PathBase
+
+class LocalPath(FSBase):
+ """ object oriented interface to os.path and other local filesystem
+ related information.
+ """
+ class ImportMismatchError(ImportError):
+ """ raised on pyimport() if there is a mismatch of __file__'s"""
+
+ sep = os.sep
+ class Checkers(common.Checkers):
+ def _stat(self):
+ try:
+ return self._statcache
+ except AttributeError:
+ try:
+ self._statcache = self.path.stat()
+ except py.error.ELOOP:
+ self._statcache = self.path.lstat()
+ return self._statcache
+
+ def dir(self):
+ return S_ISDIR(self._stat().mode)
+
+ def file(self):
+ return S_ISREG(self._stat().mode)
+
+ def exists(self):
+ return self._stat()
+
+ def link(self):
+ st = self.path.lstat()
+ return S_ISLNK(st.mode)
+
+ def __init__(self, path=None, expanduser=False):
+ """ Initialize and return a local Path instance.
+
+ Path can be relative to the current directory.
+ If path is None it defaults to the current working directory.
+ If expanduser is True, tilde-expansion is performed.
+ Note that Path instances always carry an absolute path.
+ Note also that passing in a local path object will simply return
+ the exact same path object. Use new() to get a new copy.
+ """
+ if path is None:
+ self.strpath = py.error.checked_call(os.getcwd)
+ else:
+ try:
+ path = fspath(path)
+ except TypeError:
+ raise ValueError("can only pass None, Path instances "
+ "or non-empty strings to LocalPath")
+ if expanduser:
+ path = os.path.expanduser(path)
+ self.strpath = abspath(path)
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def __eq__(self, other):
+ s1 = fspath(self)
+ try:
+ s2 = fspath(other)
+ except TypeError:
+ return False
+ if iswin32:
+ s1 = s1.lower()
+ try:
+ s2 = s2.lower()
+ except AttributeError:
+ return False
+ return s1 == s2
+
+ def __ne__(self, other):
+ return not (self == other)
+
+ def __lt__(self, other):
+ return fspath(self) < fspath(other)
+
+ def __gt__(self, other):
+ return fspath(self) > fspath(other)
+
+ def samefile(self, other):
+ """ return True if 'other' references the same file as 'self'.
+ """
+ other = fspath(other)
+ if not isabs(other):
+ other = abspath(other)
+ if self == other:
+ return True
+ if iswin32:
+ return False # there is no samefile
+ return py.error.checked_call(
+ os.path.samefile, self.strpath, other)
+
+ def remove(self, rec=1, ignore_errors=False):
+ """ remove a file or directory (or a directory tree if rec=1).
+ if ignore_errors is True, errors while removing directories will
+ be ignored.
+ """
+ if self.check(dir=1, link=0):
+ if rec:
+ # force remove of readonly files on windows
+ if iswin32:
+                    self.chmod(448, rec=1) # octal 0700
+ py.error.checked_call(py.std.shutil.rmtree, self.strpath,
+ ignore_errors=ignore_errors)
+ else:
+ py.error.checked_call(os.rmdir, self.strpath)
+ else:
+ if iswin32:
+                self.chmod(448) # octal 0700
+ py.error.checked_call(os.remove, self.strpath)
+
+ def computehash(self, hashtype="md5", chunksize=524288):
+ """ return hexdigest of hashvalue for this file. """
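+        # Illustrative sketch (editor's note): for an existing file path f,
+        # f.computehash() returns the md5 hexdigest and f.computehash('sha1')
+        # the sha1 hexdigest, reading the file in 512 KiB chunks.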
+ try:
+ try:
+ import hashlib as mod
+ except ImportError:
+ if hashtype == "sha1":
+ hashtype = "sha"
+ mod = __import__(hashtype)
+ hash = getattr(mod, hashtype)()
+ except (AttributeError, ImportError):
+ raise ValueError("Don't know how to compute %r hash" %(hashtype,))
+ f = self.open('rb')
+ try:
+ while 1:
+ buf = f.read(chunksize)
+ if not buf:
+ return hash.hexdigest()
+ hash.update(buf)
+ finally:
+ f.close()
+
+ def new(self, **kw):
+ """ create a modified version of this path.
+ the following keyword arguments modify various path parts::
+
+ a:/some/path/to/a/file.ext
+ xx drive
+ xxxxxxxxxxxxxxxxx dirname
+ xxxxxxxx basename
+ xxxx purebasename
+ xxx ext
+ """
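+        # Illustrative sketch (editor's note, 'p' is hypothetical):
+        #   p = py.path.local('/some/path/to/a/file.ext')
+        #   p.new(ext='.txt')          -> local('/some/path/to/a/file.txt')
+        #   p.new(basename='other.c')  -> local('/some/path/to/a/other.c')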
+ obj = object.__new__(self.__class__)
+ if not kw:
+ obj.strpath = self.strpath
+ return obj
+ drive, dirname, basename, purebasename,ext = self._getbyspec(
+ "drive,dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ try:
+ ext = kw['ext']
+ except KeyError:
+ pass
+ else:
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ if ('dirname' in kw and not kw['dirname']):
+ kw['dirname'] = drive
+ else:
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ obj.strpath = normpath(
+ "%(dirname)s%(sep)s%(basename)s" % kw)
+ return obj
+
+ def _getbyspec(self, spec):
+ """ see new for what 'spec' can be. """
+ res = []
+ parts = self.strpath.split(self.sep)
+
+ args = filter(None, spec.split(',') )
+ append = res.append
+ for name in args:
+ if name == 'drive':
+ append(parts[0])
+ elif name == 'dirname':
+ append(self.sep.join(parts[:-1]))
+ else:
+ basename = parts[-1]
+ if name == 'basename':
+ append(basename)
+ else:
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ append(purebasename)
+ elif name == 'ext':
+ append(ext)
+ else:
+ raise ValueError("invalid part specification %r" % name)
+ return res
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path joined with any given path arguments. """
+ if not kwargs:
+ path = object.__new__(self.__class__)
+ path.strpath = dirname(self.strpath)
+ if args:
+ path = path.join(*args)
+ return path
+ return super(LocalPath, self).dirpath(*args, **kwargs)
+
+ def join(self, *args, **kwargs):
+ """ return a new path by appending all 'args' as path
+ components. if abs=1 is used restart from root if any
+ of the args is an absolute path.
+ """
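+        # Illustrative sketch (editor's note): for p = py.path.local('/tmp'),
+        # p.join('a', 'b') is local('/tmp/a/b'); with abs=1 an absolute
+        # component restarts the path, so p.join('x', '/etc', abs=1) -> local('/etc').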
+ sep = self.sep
+ strargs = [fspath(arg) for arg in args]
+ strpath = self.strpath
+ if kwargs.get('abs'):
+ newargs = []
+ for arg in reversed(strargs):
+ if isabs(arg):
+ strpath = arg
+ strargs = newargs
+ break
+ newargs.insert(0, arg)
+ for arg in strargs:
+ arg = arg.strip(sep)
+ if iswin32:
+ # allow unix style paths even on windows.
+ arg = arg.strip('/')
+ arg = arg.replace('/', sep)
+ strpath = strpath + sep + arg
+ obj = object.__new__(self.__class__)
+ obj.strpath = normpath(strpath)
+ return obj
+
+ def open(self, mode='r', ensure=False, encoding=None):
+ """ return an opened file with the given mode.
+
+ If ensure is True, create parent directories if needed.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if encoding:
+ return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding)
+ return py.error.checked_call(open, self.strpath, mode)
+
+ def _fastjoin(self, name):
+ child = object.__new__(self.__class__)
+ child.strpath = self.strpath + self.sep + name
+ return child
+
+ def islink(self):
+ return islink(self.strpath)
+
+ def check(self, **kw):
+ if not kw:
+ return exists(self.strpath)
+ if len(kw) == 1:
+ if "dir" in kw:
+ return not kw["dir"] ^ isdir(self.strpath)
+ if "file" in kw:
+ return not kw["file"] ^ isfile(self.strpath)
+ return super(LocalPath, self).check(**kw)
+
+ _patternchars = set("*?[" + os.path.sep)
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
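+        # Illustrative sketch (editor's note): d.listdir('*.conf', sort=True)
+        # on a hypothetical directory d returns the matching children as
+        # py.path.local objects, sorted by path.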
+ if fil is None and sort is None:
+ names = py.error.checked_call(os.listdir, self.strpath)
+ return map_as_list(self._fastjoin, names)
+ if isinstance(fil, py.builtin._basestring):
+ if not self._patternchars.intersection(fil):
+ child = self._fastjoin(fil)
+ if exists(child.strpath):
+ return [child]
+ return []
+ fil = common.FNMatcher(fil)
+ names = py.error.checked_call(os.listdir, self.strpath)
+ res = []
+ for name in names:
+ child = self._fastjoin(name)
+ if fil is None or fil(child):
+ res.append(child)
+ self._sortlist(res, sort)
+ return res
+
+ def size(self):
+ """ return size of the underlying file object """
+ return self.stat().size
+
+ def mtime(self):
+ """ return last modification time of the path. """
+ return self.stat().mtime
+
+ def copy(self, target, mode=False, stat=False):
+ """ copy path to target.
+
+            If mode is True, copy permission from path to target.
+ If stat is True, copy permission, last modification
+ time, last access time, and flags from path to target.
+ """
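+        # Illustrative sketch (editor's note, paths are hypothetical):
+        #   py.path.local('/tmp/src').copy(py.path.local('/tmp/dst'), mode=True)
+        # copies the file (or tree, recursively) to /tmp/dst, preserving
+        # permission bits.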
+ if self.check(file=1):
+ if target.check(dir=1):
+ target = target.join(self.basename)
+ assert self!=target
+ copychunked(self, target)
+ if mode:
+ copymode(self.strpath, target.strpath)
+ if stat:
+ copystat(self, target)
+ else:
+ def rec(p):
+ return p.check(link=0)
+ for x in self.visit(rec=rec):
+ relpath = x.relto(self)
+ newx = target.join(relpath)
+ newx.dirpath().ensure(dir=1)
+ if x.check(link=1):
+ newx.mksymlinkto(x.readlink())
+ continue
+ elif x.check(file=1):
+ copychunked(x, newx)
+ elif x.check(dir=1):
+ newx.ensure(dir=1)
+ if mode:
+ copymode(x.strpath, newx.strpath)
+ if stat:
+ copystat(x, newx)
+
+ def rename(self, target):
+ """ rename this path to target. """
+ target = fspath(target)
+ return py.error.checked_call(os.rename, self.strpath, target)
+
+ def dump(self, obj, bin=1):
+ """ pickle object into path location"""
+ f = self.open('wb')
+ try:
+ py.error.checked_call(py.std.pickle.dump, obj, f, bin)
+ finally:
+ f.close()
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ p = self.join(*args)
+ py.error.checked_call(os.mkdir, fspath(p))
+ return p
+
+ def write_binary(self, data, ensure=False):
+ """ write binary data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('wb') as f:
+ f.write(data)
+
+ def write_text(self, data, encoding, ensure=False):
+ """ write text data into path using the specified encoding.
+ If ensure is True create missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ with self.open('w', encoding=encoding) as f:
+ f.write(data)
+
+ def write(self, data, mode='w', ensure=False):
+ """ write data into path. If ensure is True create
+ missing parent directories.
+ """
+ if ensure:
+ self.dirpath().ensure(dir=1)
+ if 'b' in mode:
+ if not py.builtin._isbytes(data):
+ raise ValueError("can only process bytes")
+ else:
+ if not py.builtin._istext(data):
+ if not py.builtin._isbytes(data):
+ data = str(data)
+ else:
+ data = py.builtin._totext(data, sys.getdefaultencoding())
+ f = self.open(mode)
+ try:
+ f.write(data)
+ finally:
+ f.close()
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent == self:
+ return self
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ try:
+ self.mkdir()
+ except py.error.EEXIST:
+ # race condition: file/dir created by another thread/process.
+ # complain if it is not a dir
+ if self.check(dir=0):
+ raise
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
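+        # Illustrative sketch (editor's note, 'base' is hypothetical):
+        #   base = py.path.local('/tmp/work')
+        #   base.ensure('logs', dir=True)    # creates /tmp/work/logs/
+        #   base.ensure('logs', 'run.txt')   # creates an empty file, parents too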
+ p = self.join(*args)
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ else:
+ p.dirpath()._ensuredirs()
+ if not p.check(file=1):
+ p.open('w').close()
+ return p
+
+ def stat(self, raising=True):
+ """ Return an os.stat() tuple. """
+ if raising == True:
+ return Stat(self, py.error.checked_call(os.stat, self.strpath))
+ try:
+ return Stat(self, os.stat(self.strpath))
+ except KeyboardInterrupt:
+ raise
+ except Exception:
+ return None
+
+ def lstat(self):
+ """ Return an os.lstat() tuple. """
+ return Stat(self, py.error.checked_call(os.lstat, self.strpath))
+
+ def setmtime(self, mtime=None):
+ """ set modification time for the given path. if 'mtime' is None
+ (the default) then the file's mtime is set to current time.
+
+ Note that the resolution for 'mtime' is platform dependent.
+ """
+ if mtime is None:
+ return py.error.checked_call(os.utime, self.strpath, mtime)
+ try:
+ return py.error.checked_call(os.utime, self.strpath, (-1, mtime))
+ except py.error.EINVAL:
+ return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime))
+
+ def chdir(self):
+ """ change directory to self and return old current directory """
+ try:
+ old = self.__class__()
+ except py.error.ENOENT:
+ old = None
+ py.error.checked_call(os.chdir, self.strpath)
+ return old
+
+
+ @contextmanager
+ def as_cwd(self):
+        """ return a context manager which changes the current working
+        directory to this path during the managed "with" block.
+        On __enter__ it returns the old dir.
+ """
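+        # Illustrative sketch (editor's note, 'workdir' is a hypothetical
+        # existing directory):
+        #   with workdir.as_cwd() as old:
+        #       ...  # os.getcwd() now equals str(workdir)
+        # the previous working directory is restored on exit.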
+ old = self.chdir()
+ try:
+ yield old
+ finally:
+ old.chdir()
+
+ def realpath(self):
+ """ return a new path which contains no symbolic links."""
+ return self.__class__(os.path.realpath(self.strpath))
+
+ def atime(self):
+ """ return last access time of the path. """
+ return self.stat().atime
+
+ def __repr__(self):
+ return 'local(%r)' % self.strpath
+
+ def __str__(self):
+ """ return string representation of the Path. """
+ return self.strpath
+
+ def chmod(self, mode, rec=0):
+ """ change permissions to the given mode. If mode is an
+ integer it directly encodes the os-specific modes.
+ if rec is True perform recursively.
+ """
+ if not isinstance(mode, int):
+ raise TypeError("mode %r must be an integer" % (mode,))
+ if rec:
+ for x in self.visit(rec=rec):
+ py.error.checked_call(os.chmod, str(x), mode)
+ py.error.checked_call(os.chmod, self.strpath, mode)
+
+ def pypkgpath(self):
+ """ return the Python package path by looking for the last
+ directory upwards which still contains an __init__.py.
+        Return None if a pkgpath cannot be determined.
+ """
+ pkgpath = None
+ for parent in self.parts(reverse=True):
+ if parent.isdir():
+ if not parent.join('__init__.py').exists():
+ break
+ if not isimportable(parent.basename):
+ break
+ pkgpath = parent
+ return pkgpath
+
+ def _ensuresyspath(self, ensuremode, path):
+ if ensuremode:
+ s = str(path)
+ if ensuremode == "append":
+ if s not in sys.path:
+ sys.path.append(s)
+ else:
+ if s != sys.path[0]:
+ sys.path.insert(0, s)
+
+ def pyimport(self, modname=None, ensuresyspath=True):
+ """ return path as an imported python module.
+
+ If modname is None, look for the containing package
+ and construct an according module name.
+ The module will be put/looked up in sys.modules.
+ if ensuresyspath is True then the root dir for importing
+ the file (taking __init__.py files into account) will
+ be prepended to sys.path if it isn't there already.
+ If ensuresyspath=="append" the root dir will be appended
+ if it isn't already contained in sys.path.
+ if ensuresyspath is False no modification of syspath happens.
+ """
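+        # Illustrative sketch (editor's note, file name is hypothetical):
+        #   mod = py.path.local('/tmp/proj/helpers.py').pyimport()
+        # imports helpers.py as module 'helpers' (or a dotted name if the file
+        # sits inside a package with __init__.py files).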
+ if not self.check():
+ raise py.error.ENOENT(self)
+
+ pkgpath = None
+ if modname is None:
+ pkgpath = self.pypkgpath()
+ if pkgpath is not None:
+ pkgroot = pkgpath.dirpath()
+ names = self.new(ext="").relto(pkgroot).split(self.sep)
+ if names[-1] == "__init__":
+ names.pop()
+ modname = ".".join(names)
+ else:
+ pkgroot = self.dirpath()
+ modname = self.purebasename
+
+ self._ensuresyspath(ensuresyspath, pkgroot)
+ __import__(modname)
+ mod = sys.modules[modname]
+ if self.basename == "__init__.py":
+ return mod # we don't check anything as we might
+                              # be in a namespace package ... too icky to check
+ modfile = mod.__file__
+ if modfile[-4:] in ('.pyc', '.pyo'):
+ modfile = modfile[:-1]
+ elif modfile.endswith('$py.class'):
+ modfile = modfile[:-9] + '.py'
+ if modfile.endswith(os.path.sep + "__init__.py"):
+ if self.basename != "__init__.py":
+ modfile = modfile[:-12]
+ try:
+ issame = self.samefile(modfile)
+ except py.error.ENOENT:
+ issame = False
+ if not issame:
+ raise self.ImportMismatchError(modname, modfile, self)
+ return mod
+ else:
+ try:
+ return sys.modules[modname]
+ except KeyError:
+ # we have a custom modname, do a pseudo-import
+ mod = py.std.types.ModuleType(modname)
+ mod.__file__ = str(self)
+ sys.modules[modname] = mod
+ try:
+ py.builtin.execfile(str(self), mod.__dict__)
+ except:
+ del sys.modules[modname]
+ raise
+ return mod
+
+ def sysexec(self, *argv, **popen_opts):
+ """ return stdout text from executing a system child process,
+            where the 'self' path points to an executable.
+ The process is directly invoked and not through a system shell.
+ """
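+        # Illustrative sketch (editor's note):
+        #   out = py.path.local('/bin/echo').sysexec('hello')
+        # returns the captured stdout ('hello\n'); a non-zero exit status
+        # raises py.process.cmdexec.Error.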
+ from subprocess import Popen, PIPE
+ argv = map_as_list(str, argv)
+ popen_opts['stdout'] = popen_opts['stderr'] = PIPE
+ proc = Popen([str(self)] + argv, **popen_opts)
+ stdout, stderr = proc.communicate()
+ ret = proc.wait()
+ if py.builtin._isbytes(stdout):
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ if ret != 0:
+ if py.builtin._isbytes(stderr):
+ stderr = py.builtin._totext(stderr, sys.getdefaultencoding())
+ raise py.process.cmdexec.Error(ret, ret, str(self),
+ stdout, stderr,)
+ return stdout
+
+ def sysfind(cls, name, checker=None, paths=None):
+        """ return a path object found by looking at the system's
+            underlying PATH specification. If the checker is not None
+            it will be invoked to filter matching paths. If a binary
+            cannot be found, None is returned.
+            Note: This is probably not working on plain win32 systems
+            but may work on cygwin.
+ """
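+        # Illustrative sketch (editor's note):
+        #   svn = py.path.local.sysfind('svn')
+        # returns a py.path.local for the 'svn' executable found on PATH,
+        # or None if there is no match.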
+ if isabs(name):
+ p = py.path.local(name)
+ if p.check(file=1):
+ return p
+ else:
+ if paths is None:
+ if iswin32:
+ paths = py.std.os.environ['Path'].split(';')
+ if '' not in paths and '.' not in paths:
+ paths.append('.')
+ try:
+ systemroot = os.environ['SYSTEMROOT']
+ except KeyError:
+ pass
+ else:
+ paths = [re.sub('%SystemRoot%', systemroot, path)
+ for path in paths]
+ else:
+ paths = py.std.os.environ['PATH'].split(':')
+ tryadd = []
+ if iswin32:
+ tryadd += os.environ['PATHEXT'].split(os.pathsep)
+ tryadd.append("")
+
+ for x in paths:
+ for addext in tryadd:
+ p = py.path.local(x).join(name, abs=True) + addext
+ try:
+ if p.check(file=1):
+ if checker:
+ if not checker(p):
+ continue
+ return p
+ except py.error.EACCES:
+ pass
+ return None
+ sysfind = classmethod(sysfind)
+
+ def _gethomedir(cls):
+ try:
+ x = os.environ['HOME']
+ except KeyError:
+ try:
+ x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH']
+ except KeyError:
+ return None
+ return cls(x)
+ _gethomedir = classmethod(_gethomedir)
+
+ #"""
+ #special class constructors for local filesystem paths
+ #"""
+ def get_temproot(cls):
+ """ return the system's temporary directory
+            (where temporary files are usually created)
+ """
+ return py.path.local(py.std.tempfile.gettempdir())
+ get_temproot = classmethod(get_temproot)
+
+ def mkdtemp(cls, rootdir=None):
+        """ return a Path object pointing to a fresh temporary directory
+        (which we created ourselves).
+ """
+ import tempfile
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+ return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir)))
+ mkdtemp = classmethod(mkdtemp)
+
+ def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3,
+ lock_timeout = 172800): # two days
+        """ return a unique directory with a number greater than the current
+        maximum one. The number is assumed to start directly after prefix.
+        If keep is true, directories with a number less than (maxnum-keep)
+        will be removed.
+ """
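+        # Illustrative sketch (editor's note, prefix/rootdir are hypothetical):
+        #   d = py.path.local.make_numbered_dir(prefix='run-',
+        #                                       rootdir=py.path.local('/tmp'),
+        #                                       keep=3)
+        # creates e.g. /tmp/run-7, symlinks /tmp/run-<username> to it, and
+        # prunes run-3 and older (once their lock timeout has expired).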
+ if rootdir is None:
+ rootdir = cls.get_temproot()
+
+ def parse_num(path):
+ """ parse the number out of a path (if it matches the prefix) """
+ bn = path.basename
+ if bn.startswith(prefix):
+ try:
+ return int(bn[len(prefix):])
+ except ValueError:
+ pass
+
+ # compute the maximum number currently in use with the
+ # prefix
+ lastmax = None
+ while True:
+ maxnum = -1
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None:
+ maxnum = max(maxnum, num)
+
+ # make the new directory
+ try:
+ udir = rootdir.mkdir(prefix + str(maxnum+1))
+ except py.error.EEXIST:
+ # race condition: another thread/process created the dir
+ # in the meantime. Try counting again
+ if lastmax == maxnum:
+ raise
+ lastmax = maxnum
+ continue
+ break
+
+ # put a .lock file in the new directory that will be removed at
+ # process exit
+ if lock_timeout:
+ lockfile = udir.join('.lock')
+ mypid = os.getpid()
+ if hasattr(lockfile, 'mksymlinkto'):
+ lockfile.mksymlinkto(str(mypid))
+ else:
+ lockfile.write(str(mypid))
+ def try_remove_lockfile():
+ # in a fork() situation, only the last process should
+ # remove the .lock, otherwise the other processes run the
+ # risk of seeing their temporary dir disappear. For now
+ # we remove the .lock in the parent only (i.e. we assume
+ # that the children finish before the parent).
+ if os.getpid() != mypid:
+ return
+ try:
+ lockfile.remove()
+ except py.error.Error:
+ pass
+ atexit.register(try_remove_lockfile)
+
+ # prune old directories
+ if keep:
+ for path in rootdir.listdir():
+ num = parse_num(path)
+ if num is not None and num <= (maxnum - keep):
+ lf = path.join('.lock')
+ try:
+ t1 = lf.lstat().mtime
+ t2 = lockfile.lstat().mtime
+ if not lock_timeout or abs(t2-t1) < lock_timeout:
+ continue # skip directories still locked
+ except py.error.Error:
+ pass # assume that it means that there is no 'lf'
+ try:
+ path.remove(rec=1)
+ except KeyboardInterrupt:
+ raise
+ except: # this might be py.error.Error, WindowsError ...
+ pass
+
+ # make link...
+ try:
+ username = os.environ['USER'] #linux, et al
+ except KeyError:
+ try:
+ username = os.environ['USERNAME'] #windows
+ except KeyError:
+ username = 'current'
+
+ src = str(udir)
+ dest = src[:src.rfind('-')] + '-' + username
+ try:
+ os.unlink(dest)
+ except OSError:
+ pass
+ try:
+ os.symlink(src, dest)
+ except (OSError, AttributeError, NotImplementedError):
+ pass
+
+ return udir
+ make_numbered_dir = classmethod(make_numbered_dir)
+
+def copymode(src, dest):
+ """ copy permission from src to dst. """
+ py.std.shutil.copymode(src, dest)
+
+def copystat(src, dest):
+ """ copy permission, last modification time, last access time, and flags from src to dst."""
+ py.std.shutil.copystat(str(src), str(dest))
+
+def copychunked(src, dest):
+ chunksize = 524288 # half a meg of bytes
+ fsrc = src.open('rb')
+ try:
+ fdest = dest.open('wb')
+ try:
+ while 1:
+ buf = fsrc.read(chunksize)
+ if not buf:
+ break
+ fdest.write(buf)
+ finally:
+ fdest.close()
+ finally:
+ fsrc.close()
+
+def isimportable(name):
+ if name and (name[0].isalpha() or name[0] == '_'):
+ name = name.replace("_", '')
+ return not name or name.isalnum()
diff --git a/lib/spack/external/py/_path/svnurl.py b/lib/spack/external/py/_path/svnurl.py
new file mode 100644
index 0000000000..78d71317ac
--- /dev/null
+++ b/lib/spack/external/py/_path/svnurl.py
@@ -0,0 +1,380 @@
+"""
+module defining a subversion path object based on the external
+command 'svn'. This module aims to work with svn 1.3 and higher
+but might also interact well with earlier versions.
+"""
+
+import os, sys, time, re
+import py
+from py import path, process
+from py._path import common
+from py._path import svnwc as svncommon
+from py._path.cacheutil import BuildcostAccessCache, AgingCache
+
+DEBUG=False
+
+class SvnCommandPath(svncommon.SvnPathBase):
+ """ path implementation that offers access to (possibly remote) subversion
+ repositories. """
+
+ _lsrevcache = BuildcostAccessCache(maxentries=128)
+ _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0)
+
+ def __new__(cls, path, rev=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(path, cls):
+ rev = path.rev
+ auth = path.auth
+ path = path.strpath
+ svncommon.checkbadchars(path)
+ path = path.rstrip('/')
+ self.strpath = path
+ self.rev = rev
+ self.auth = auth
+ return self
+
+ def __repr__(self):
+ if self.rev == -1:
+ return 'svnurl(%r)' % self.strpath
+ else:
+ return 'svnurl(%r, %r)' % (self.strpath, self.rev)
+
+ def _svnwithrev(self, cmd, *args):
+ """ execute an svn command, append our own url and revision """
+ if self.rev is None:
+ return self._svnwrite(cmd, *args)
+ else:
+ args = ['-r', self.rev] + list(args)
+ return self._svnwrite(cmd, *args)
+
+ def _svnwrite(self, cmd, *args):
+ """ execute an svn command, append our own url """
+ l = ['svn %s' % cmd]
+ args = ['"%s"' % self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._encodedurl())
+ # fixing the locale because we can't otherwise parse
+ string = " ".join(l)
+ if DEBUG:
+ print("execing %s" % string)
+ out = self._svncmdexecauth(string)
+ return out
+
+ def _svncmdexecauth(self, cmd):
+ """ execute an svn command 'as is' """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._cmdexec(cmd)
+
+ def _cmdexec(self, cmd):
+ try:
+ out = process.cmdexec(cmd)
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if (e.err.find('File Exists') != -1 or
+ e.err.find('File already exists') != -1):
+ raise py.error.EEXIST(self)
+ raise
+ return out
+
+ def _svnpopenauth(self, cmd):
+        """ execute an svn command, return a pipe for reading its output """
+ cmd = svncommon.fixlocale() + cmd
+ if self.auth is not None:
+ cmd += ' ' + self.auth.makecmdoptions()
+ return self._popen(cmd)
+
+ def _popen(self, cmd):
+ return os.popen(cmd)
+
+ def _encodedurl(self):
+ return self._escape(self.strpath)
+
+ def _norev_delentry(self, path):
+ auth = self.auth and self.auth.makecmdoptions() or None
+ self._lsnorevcache.delentry((str(path), auth))
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ if mode not in ("r", "rU",):
+ raise ValueError("mode %r not supported" % (mode,))
+ assert self.check(file=1) # svn cat returns an empty file otherwise
+ if self.rev is None:
+ return self._svnpopenauth('svn cat "%s"' % (
+ self._escape(self.strpath), ))
+ else:
+ return self._svnpopenauth('svn cat -r %s "%s"' % (
+ self.rev, self._escape(self.strpath)))
+
+ def dirpath(self, *args, **kwargs):
+ """ return the directory path of the current path joined
+ with any given path arguments.
+ """
+ l = self.strpath.split(self.sep)
+ if len(l) < 4:
+ raise py.error.EINVAL(self, "base is not valid")
+ elif len(l) == 4:
+ return self.join(*args, **kwargs)
+ else:
+ return self.new(basename='').join(*args, **kwargs)
+
+ # modifying methods (cache must be invalidated)
+ def mkdir(self, *args, **kwargs):
+ """ create & return the directory joined with args.
+ pass a 'msg' keyword argument to set the commit message.
+ """
+ commit_msg = kwargs.get('msg', "mkdir by py lib invocation")
+ createpath = self.join(*args)
+ createpath._svnwrite('mkdir', '-m', commit_msg)
+ self._norev_delentry(createpath.dirpath())
+ return createpath
+
+ def copy(self, target, msg='copied by py lib invocation'):
+ """ copy path to target with checkin message msg."""
+ if getattr(target, 'rev', None) is not None:
+ raise py.error.EINVAL(target, "revisions are immutable")
+ self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg,
+ self._escape(self), self._escape(target)))
+ self._norev_delentry(target.dirpath())
+
+ def rename(self, target, msg="renamed by py lib invocation"):
+ """ rename this path to target with checkin message msg. """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %(
+ msg, self._escape(self), self._escape(target)))
+ self._norev_delentry(self.dirpath())
+ self._norev_delentry(self)
+
+ def remove(self, rec=1, msg='removed by py lib invocation'):
+ """ remove a file or directory (or a directory tree if rec=1) with
+checkin message msg."""
+ if self.rev is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self)))
+ self._norev_delentry(self.dirpath())
+
+ def export(self, topath):
+ """ export to a local path
+
+ topath should not exist prior to calling this, returns a
+ py.path.local instance
+ """
+ topath = py.path.local(topath)
+ args = ['"%s"' % (self._escape(self),),
+ '"%s"' % (self._escape(topath),)]
+ if self.rev is not None:
+ args = ['-r', str(self.rev)] + args
+ self._svncmdexecauth('svn export %s' % (' '.join(args),))
+ return topath
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). If you specify a keyword argument 'dir=True'
+ then the path is forced to be a directory path.
+ """
+ if getattr(self, 'rev', None) is not None:
+ raise py.error.EINVAL(self, "revisions are immutable")
+ target = self.join(*args)
+ dir = kwargs.get('dir', 0)
+ for x in target.parts(reverse=True):
+ if x.check():
+ break
+ else:
+ raise py.error.ENOENT(target, "has not any valid base!")
+ if x == target:
+ if not x.check(dir=dir):
+ raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x)
+ return x
+ tocreate = target.relto(x)
+ basename = tocreate.split(self.sep, 1)[0]
+ tempdir = py.path.local.mkdtemp()
+ try:
+ tempdir.ensure(tocreate, dir=dir)
+ cmd = 'svn import -m "%s" "%s" "%s"' % (
+ "ensure %s" % self._escape(tocreate),
+ self._escape(tempdir.join(basename)),
+ x.join(basename)._encodedurl())
+ self._svncmdexecauth(cmd)
+ self._norev_delentry(x)
+ finally:
+ tempdir.remove()
+ return target
+
+ # end of modifying methods
+ def _propget(self, name):
+ res = self._svnwithrev('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def _proplist(self):
+ res = self._svnwithrev('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return svncommon.PropListDict(self, lines)
+
+ def info(self):
+ """ return an Info structure with svn-provided information. """
+ parent = self.dirpath()
+ nameinfo_seq = parent._listdir_nameinfo()
+ bn = self.basename
+ for name, info in nameinfo_seq:
+ if name == bn:
+ return info
+ raise py.error.ENOENT(self)
+
+
+ def _listdir_nameinfo(self):
+ """ return sequence of name-info directory entries of self """
+ def builder():
+ try:
+ res = self._svnwithrev('ls', '-v')
+ except process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('non-existent in that revision') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("E200009:") != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('File not found') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('not part of a repository')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find('Unable to open')!=-1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.lower().find('method not allowed')!=-1:
+ raise py.error.EACCES(self, e.err)
+ raise py.error.Error(e.err)
+ lines = res.split('\n')
+ nameinfo_seq = []
+ for lsline in lines:
+ if lsline:
+ info = InfoSvnCommand(lsline)
+ if info._name != '.': # svn 1.5 produces '.' dirs,
+ nameinfo_seq.append((info._name, info))
+ nameinfo_seq.sort()
+ return nameinfo_seq
+ auth = self.auth and self.auth.makecmdoptions() or None
+ if self.rev is not None:
+ return self._lsrevcache.getorbuild((self.strpath, self.rev, auth),
+ builder)
+ else:
+ return self._lsnorevcache.getorbuild((self.strpath, auth),
+ builder)
+
+ def listdir(self, fil=None, sort=None):
+ """ list directory contents, possibly filter by the given fil func
+ and possibly sorted.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ nameinfo_seq = self._listdir_nameinfo()
+ if len(nameinfo_seq) == 1:
+ name, info = nameinfo_seq[0]
+ if name == self.basename and info.kind == 'file':
+ #if not self.check(dir=1):
+ raise py.error.ENOTDIR(self)
+ paths = [self.join(name) for (name, info) in nameinfo_seq]
+ if fil:
+ paths = [x for x in paths if fil(x)]
+ self._sortlist(paths, sort)
+ return paths
+
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to HEAD, the latest revision).
+rev_end is the last revision (defaulting to 1, the first revision).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() #make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' %
+ (rev_opt, verbose_opt, self.strpath))
+ from xml.dom import minidom
+ tree = minidom.parse(xmlpipe)
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(svncommon.LogEntry(logentry))
+ return result
+
+#01234567890123456789012345678901234567890123467
+# 2256 hpk 165 Nov 24 17:55 __init__.py
+# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!!
+# 1312 johnny 1627 May 05 14:32 test_decorators.py
+#
+class InfoSvnCommand:
+ # the '0?' part in the middle is an indication of whether the resource is
+ # locked, see 'svn help ls'
+ lspattern = re.compile(
+ r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? '
+        r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$')
+ def __init__(self, line):
+ # this is a typical line from 'svn ls http://...'
+ #_ 1127 jum 0 Jul 13 15:28 branch/
+ match = self.lspattern.match(line)
+ data = match.groupdict()
+ self._name = data['file']
+ if self._name[-1] == '/':
+ self._name = self._name[:-1]
+ self.kind = 'dir'
+ else:
+ self.kind = 'file'
+ #self.has_props = l.pop(0) == 'P'
+ self.created_rev = int(data['rev'])
+ self.last_author = data['author']
+ self.size = data['size'] and int(data['size']) or 0
+ self.mtime = parse_time_with_missing_year(data['date'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+
+#____________________________________________________
+#
+# helper functions
+#____________________________________________________
+def parse_time_with_missing_year(timestr):
+ """ analyze the time part from a single line of "svn ls -v"
+    the svn output doesn't show the year, which makes the 'timestr'
+    ambiguous.
+ """
+ import calendar
+ t_now = time.gmtime()
+
+ tparts = timestr.split()
+ month = time.strptime(tparts.pop(0), '%b')[1]
+ day = time.strptime(tparts.pop(0), '%d')[2]
+ last = tparts.pop(0) # year or hour:minute
+ try:
+ if ":" in last:
+ raise ValueError()
+ year = time.strptime(last, '%Y')[0]
+ hour = minute = 0
+ except ValueError:
+ hour, minute = time.strptime(last, '%H:%M')[3:5]
+ year = t_now[0]
+
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ if t_result > t_now:
+ year -= 1
+ t_result = (year, month, day, hour, minute, 0,0,0,0)
+ return calendar.timegm(t_result)
+
+class PathEntry:
+ def __init__(self, ppart):
+ self.strpath = ppart.firstChild.nodeValue.encode('UTF-8')
+ self.action = ppart.getAttribute('action').encode('UTF-8')
+ if self.action == 'A':
+ self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8')
+ if self.copyfrom_path:
+ self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev'))
+
diff --git a/lib/spack/external/py/_path/svnwc.py b/lib/spack/external/py/_path/svnwc.py
new file mode 100644
index 0000000000..00d3b4bbaf
--- /dev/null
+++ b/lib/spack/external/py/_path/svnwc.py
@@ -0,0 +1,1240 @@
+"""
+svn-Command based Implementation of a Subversion WorkingCopy Path.
+
+ SvnWCCommandPath is the main class.
+
+"""
+
+import os, sys, time, re, calendar, string  # 'string' is used by _check_path() below
+import py
+import subprocess
+from py._path import common
+
+#-----------------------------------------------------------
+# Caching latest repository revision and repo-paths
+# (getting them is slow with the current implementations)
+#
+# XXX make mt-safe
+#-----------------------------------------------------------
+
+class cache:
+ proplist = {}
+ info = {}
+ entries = {}
+ prop = {}
+
+class RepoEntry:
+ def __init__(self, url, rev, timestamp):
+ self.url = url
+ self.rev = rev
+ self.timestamp = timestamp
+
+ def __str__(self):
+ return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp)
+
+class RepoCache:
+    """ The RepoCache manages discovered repository paths
+    and their revisions. If queried within the timeout, the cache
+    will even return the revision of the root.
+ """
+ timeout = 20 # seconds after which we forget that we know the last revision
+
+ def __init__(self):
+ self.repos = []
+
+ def clear(self):
+ self.repos = []
+
+ def put(self, url, rev, timestamp=None):
+ if rev is None:
+ return
+ if timestamp is None:
+ timestamp = time.time()
+
+ for entry in self.repos:
+ if url == entry.url:
+ entry.timestamp = timestamp
+ entry.rev = rev
+ #print "set repo", entry
+ break
+ else:
+ entry = RepoEntry(url, rev, timestamp)
+ self.repos.append(entry)
+ #print "appended repo", entry
+
+ def get(self, url):
+ now = time.time()
+ for entry in self.repos:
+ if url.startswith(entry.url):
+ if now < entry.timestamp + self.timeout:
+ #print "returning immediate Etrny", entry
+ return entry.url, entry.rev
+ return entry.url, -1
+ return url, -1
+
+repositories = RepoCache()
+
+
+# svn support code
+
+ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested
+if sys.platform == "win32":
+ ALLOWED_CHARS += ":"
+ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:'
+
+def _getsvnversion(ver=[]):
+ try:
+ return ver[0]
+ except IndexError:
+ v = py.process.cmdexec("svn -q --version")
+ v.strip()
+ v = '.'.join(v.split('.')[:2])
+ ver.append(v)
+ return v
+
+def _escape_helper(text):
+ text = str(text)
+ if py.std.sys.platform != 'win32':
+ text = str(text).replace('$', '\\$')
+ return text
+
+def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS):
+ for c in str(text):
+ if c.isalnum():
+ continue
+ if c in allowed_chars:
+ continue
+ return True
+ return False
+
+def checkbadchars(url):
+ # (hpk) not quite sure about the exact purpose, guido w.?
+ proto, uri = url.split("://", 1)
+ if proto != "file":
+ host, uripath = uri.split('/', 1)
+ # only check for bad chars in the non-protocol parts
+ if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \
+ or _check_for_bad_chars(uripath, ALLOWED_CHARS)):
+ raise ValueError("bad char in %r" % (url, ))
+
+
+#_______________________________________________________________
+
+class SvnPathBase(common.PathBase):
+ """ Base implementation for SvnPath implementations. """
+ sep = '/'
+
+ def _geturl(self):
+ return self.strpath
+ url = property(_geturl, None, None, "url of this svn-path.")
+
+ def __str__(self):
+ """ return a string representation (including rev-number) """
+ return self.strpath
+
+ def __hash__(self):
+ return hash(self.strpath)
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts::
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ obj = object.__new__(self.__class__)
+ obj.rev = kw.get('rev', self.rev)
+ obj.auth = kw.get('auth', self.auth)
+ dirname, basename, purebasename, ext = self._getbyspec(
+ "dirname,basename,purebasename,ext")
+ if 'basename' in kw:
+ if 'purebasename' in kw or 'ext' in kw:
+ raise ValueError("invalid specification %r" % kw)
+ else:
+ pb = kw.setdefault('purebasename', purebasename)
+ ext = kw.setdefault('ext', ext)
+ if ext and not ext.startswith('.'):
+ ext = '.' + ext
+ kw['basename'] = pb + ext
+
+ kw.setdefault('dirname', dirname)
+ kw.setdefault('sep', self.sep)
+ if kw['basename']:
+ obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw
+ else:
+ obj.strpath = "%(dirname)s" % kw
+ return obj
+
+ def _getbyspec(self, spec):
+ """ get specified parts of the path. 'arg' is a string
+ with comma separated path parts. The parts are returned
+ in exactly the order of the specification.
+
+ you may specify the following parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ res = []
+ parts = self.strpath.split(self.sep)
+ for name in spec.split(','):
+ name = name.strip()
+ if name == 'dirname':
+ res.append(self.sep.join(parts[:-1]))
+ elif name == 'basename':
+ res.append(parts[-1])
+ else:
+ basename = parts[-1]
+ i = basename.rfind('.')
+ if i == -1:
+ purebasename, ext = basename, ''
+ else:
+ purebasename, ext = basename[:i], basename[i:]
+ if name == 'purebasename':
+ res.append(purebasename)
+ elif name == 'ext':
+ res.append(ext)
+ else:
+ raise NameError("Don't know part %r" % name)
+ return res
+
+ def __eq__(self, other):
+ """ return true if path and rev attributes each match """
+        return (str(self) == str(other) and
+                self.rev == other.rev)
+
+ def __ne__(self, other):
+ return not self == other
+
+ def join(self, *args):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+
+ args = tuple([arg.strip(self.sep) for arg in args])
+ parts = (self.strpath, ) + args
+ newpath = self.__class__(self.sep.join(parts), self.rev, self.auth)
+ return newpath
+
+ def propget(self, name):
+ """ return the content of the given property. """
+ value = self._propget(name)
+ return value
+
+ def proplist(self):
+ """ list all property names. """
+ content = self._proplist()
+ return content
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ # shared help methods
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+
+ #def _childmaxrev(self):
+ # """ return maximum revision number of childs (or self.rev if no childs) """
+ # rev = self.rev
+ # for name, info in self._listdir_nameinfo():
+ # rev = max(rev, info.created_rev)
+ # return rev
+
+ #def _getlatestrevision(self):
+ # """ return latest repo-revision for this path. """
+ # url = self.strpath
+ # path = self.__class__(url, None)
+ #
+ # # we need a long walk to find the root-repo and revision
+ # while 1:
+ # try:
+ # rev = max(rev, path._childmaxrev())
+ # previous = path
+ # path = path.dirpath()
+ # except (IOError, process.cmdexec.Error):
+ # break
+ # if rev is None:
+ # raise IOError, "could not determine newest repo revision for %s" % self
+ # return rev
+
+ class Checkers(common.Checkers):
+ def dir(self):
+ try:
+ return self.path.info().kind == 'dir'
+ except py.error.Error:
+ return self._listdirworks()
+
+ def _listdirworks(self):
+ try:
+ self.path.listdir()
+ except py.error.ENOENT:
+ return False
+ else:
+ return True
+
+ def file(self):
+ try:
+ return self.path.info().kind == 'file'
+ except py.error.ENOENT:
+ return False
+
+ def exists(self):
+ try:
+ return self.path.info()
+ except py.error.ENOENT:
+ return self._listdirworks()
+
+def parse_apr_time(timestr):
+ i = timestr.rfind('.')
+ if i == -1:
+ raise ValueError("could not parse %s" % timestr)
+ timestr = timestr[:i]
+ parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S")
+ return time.mktime(parsedtime)
+
+class PropListDict(dict):
+    """ a dictionary which fetches property values lazily (via propget) """
+ def __init__(self, path, keynames):
+ dict.__init__(self, [(x, None) for x in keynames])
+ self.path = path
+
+ def __getitem__(self, key):
+ value = dict.__getitem__(self, key)
+ if value is None:
+ value = self.path.propget(key)
+ dict.__setitem__(self, key, value)
+ return value
+
+def fixlocale():
+ if sys.platform != 'win32':
+ return 'LC_ALL=C '
+ return ''
+
+# some nasty chunk of code to solve path and url conversion and quoting issues
+ILLEGAL_CHARS = '* | \ / : < > ? \t \n \x0b \x0c \r'.split(' ')
+if os.sep in ILLEGAL_CHARS:
+ ILLEGAL_CHARS.remove(os.sep)
+ISWINDOWS = sys.platform == 'win32'
+_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I)
+def _check_path(path):
+ illegal = ILLEGAL_CHARS[:]
+ sp = path.strpath
+ if ISWINDOWS:
+ illegal.remove(':')
+ if not _reg_allow_disk.match(sp):
+ raise ValueError('path may not contain a colon (:)')
+ for char in sp:
+ if char not in string.printable or char in illegal:
+ raise ValueError('illegal character %r in path' % (char,))
+
+def path_to_fspath(path, addat=True):
+ _check_path(path)
+ sp = path.strpath
+ if addat and path.rev != -1:
+ sp = '%s@%s' % (sp, path.rev)
+ elif addat:
+ sp = '%s@HEAD' % (sp,)
+ return sp
+
+def url_from_path(path):
+ fspath = path_to_fspath(path, False)
+ quote = py.std.urllib.quote
+ if ISWINDOWS:
+ match = _reg_allow_disk.match(fspath)
+ fspath = fspath.replace('\\', '/')
+ if match.group(1):
+ fspath = '/%s%s' % (match.group(1).replace('\\', '/'),
+ quote(fspath[len(match.group(1)):]))
+ else:
+ fspath = quote(fspath)
+ else:
+ fspath = quote(fspath)
+ if path.rev != -1:
+ fspath = '%s@%s' % (fspath, path.rev)
+ else:
+ fspath = '%s@HEAD' % (fspath,)
+ return 'file://%s' % (fspath,)
+
+class SvnAuth(object):
+ """ container for auth information for Subversion """
+ def __init__(self, username, password, cache_auth=True, interactive=True):
+ self.username = username
+ self.password = password
+ self.cache_auth = cache_auth
+ self.interactive = interactive
+
+ def makecmdoptions(self):
+ uname = self.username.replace('"', '\\"')
+ passwd = self.password.replace('"', '\\"')
+ ret = []
+ if uname:
+ ret.append('--username="%s"' % (uname,))
+ if passwd:
+ ret.append('--password="%s"' % (passwd,))
+ if not self.cache_auth:
+ ret.append('--no-auth-cache')
+ if not self.interactive:
+ ret.append('--non-interactive')
+ return ' '.join(ret)
+
+ def __str__(self):
+ return "<SvnAuth username=%s ...>" %(self.username,)
+
+rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)')
+
+class SvnWCCommandPath(common.PathBase):
+ """ path implementation offering access/modification to svn working copies.
+ It has methods similar to the functions in os.path and similar to the
+ commands of the svn client.
+ """
+ sep = os.sep
+
+ def __new__(cls, wcpath=None, auth=None):
+ self = object.__new__(cls)
+ if isinstance(wcpath, cls):
+ if wcpath.__class__ == cls:
+ return wcpath
+ wcpath = wcpath.localpath
+ if _check_for_bad_chars(str(wcpath),
+ ALLOWED_CHARS):
+ raise ValueError("bad char in wcpath %s" % (wcpath, ))
+ self.localpath = py.path.local(wcpath)
+ self.auth = auth
+ return self
+
+ strpath = property(lambda x: str(x.localpath), None, None, "string path")
+ rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision")
+
+ def __eq__(self, other):
+ return self.localpath == getattr(other, 'localpath', None)
+
+ def _geturl(self):
+ if getattr(self, '_url', None) is None:
+ info = self.info()
+ self._url = info.url #SvnPath(info.url, info.rev)
+ assert isinstance(self._url, py.builtin._basestring)
+ return self._url
+
+ url = property(_geturl, None, None, "url of this WC item")
+
+ def _escape(self, cmd):
+ return _escape_helper(cmd)
+
+ def dump(self, obj):
+ """ pickle object into path location"""
+ return self.localpath.dump(obj)
+
+ def svnurl(self):
+ """ return current SvnPath for this WC-item. """
+ info = self.info()
+ return py.path.svnurl(info.url)
+
+ def __repr__(self):
+ return "svnwc(%r)" % (self.strpath) # , self._url)
+
+ def __str__(self):
+ return str(self.localpath)
+
+ def _makeauthoptions(self):
+ if self.auth is None:
+ return ''
+ return self.auth.makecmdoptions()
+
+ def _authsvn(self, cmd, args=None):
+ args = args and list(args) or []
+ args.append(self._makeauthoptions())
+ return self._svn(cmd, *args)
+
+ def _svn(self, cmd, *args):
+ l = ['svn %s' % cmd]
+ args = [self._escape(item) for item in args]
+ l.extend(args)
+ l.append('"%s"' % self._escape(self.strpath))
+ # try fixing the locale because we can't otherwise parse
+ string = fixlocale() + " ".join(l)
+ try:
+ try:
+ key = 'LC_MESSAGES'
+ hold = os.environ.get(key)
+ os.environ[key] = 'C'
+ out = py.process.cmdexec(string)
+ finally:
+ if hold:
+ os.environ[key] = hold
+ else:
+ del os.environ[key]
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ strerr = e.err.lower()
+ if strerr.find('not found') != -1:
+ raise py.error.ENOENT(self)
+ elif strerr.find("E200009:") != -1:
+ raise py.error.ENOENT(self)
+ if (strerr.find('file exists') != -1 or
+ strerr.find('file already exists') != -1 or
+ strerr.find('w150002:') != -1 or
+ strerr.find("can't create directory") != -1):
+ raise py.error.EEXIST(strerr) #self)
+ raise
+ return out
+
+ def switch(self, url):
+ """ switch to given URL. """
+ self._authsvn('switch', [url])
+
+ def checkout(self, url=None, rev=None):
+ """ checkout from url to local wcpath. """
+ args = []
+ if url is None:
+ url = self.url
+ if rev is None or rev == -1:
+ if (py.std.sys.platform != 'win32' and
+ _getsvnversion() == '1.3'):
+ url += "@HEAD"
+ else:
+ if _getsvnversion() == '1.3':
+ url += "@%d" % rev
+ else:
+ args.append('-r' + str(rev))
+ args.append(url)
+ self._authsvn('co', args)
+
+ def update(self, rev='HEAD', interactive=True):
+ """ update working copy item to given revision. (None -> HEAD). """
+ opts = ['-r', rev]
+ if not interactive:
+ opts.append("--non-interactive")
+ self._authsvn('up', opts)
+
+ def write(self, content, mode='w'):
+ """ write content into local filesystem wc. """
+ self.localpath.write(content, mode)
+
+ def dirpath(self, *args):
+ """ return the directory Path of the current Path. """
+ return self.__class__(self.localpath.dirpath(*args), auth=self.auth)
+
+ def _ensuredirs(self):
+ parent = self.dirpath()
+ if parent.check(dir=0):
+ parent._ensuredirs()
+ if self.check(dir=0):
+ self.mkdir()
+ return self
+
+ def ensure(self, *args, **kwargs):
+ """ ensure that an args-joined path exists (by default as
+ a file). if you specify a keyword argument 'directory=True'
+ then the path is forced to be a directory path.
+ """
+ p = self.join(*args)
+ if p.check():
+ if p.check(versioned=False):
+ p.add()
+ return p
+ if kwargs.get('dir', 0):
+ return p._ensuredirs()
+ parent = p.dirpath()
+ parent._ensuredirs()
+ p.write("")
+ p.add()
+ return p
+
+ def mkdir(self, *args):
+ """ create & return the directory joined with args. """
+ if args:
+ return self.join(*args).mkdir()
+ else:
+ self._svn('mkdir')
+ return self
+
+ def add(self):
+ """ add ourself to svn """
+ self._svn('add')
+
+ def remove(self, rec=1, force=1):
+ """ remove a file or a directory tree. 'rec'ursive is
+ ignored and considered always true (because of
+ underlying svn semantics.
+        underlying svn semantics).
+ assert rec, "svn cannot remove non-recursively"
+ if not self.check(versioned=True):
+ # not added to svn (anymore?), just remove
+ py.path.local(self).remove()
+ return
+ flags = []
+ if force:
+ flags.append('--force')
+ self._svn('remove', *flags)
+
+ def copy(self, target):
+ """ copy path to target."""
+ py.process.cmdexec("svn copy %s %s" %(str(self), str(target)))
+
+ def rename(self, target):
+ """ rename this path to target. """
+ py.process.cmdexec("svn move --force %s %s" %(str(self), str(target)))
+
+ def lock(self):
+ """ set a lock (exclusive) on the resource """
+ out = self._authsvn('lock').strip()
+ if not out:
+ # warning or error, raise exception
+ raise ValueError("unknown error in svn lock command")
+
+ def unlock(self):
+ """ unset a previously set lock """
+ out = self._authsvn('unlock').strip()
+ if out.startswith('svn:'):
+ # warning or error, raise exception
+ raise Exception(out[4:])
+
+ def cleanup(self):
+ """ remove any locks from the resource """
+ # XXX should be fixed properly!!!
+ try:
+ self.unlock()
+ except:
+ pass
+
+ def status(self, updates=0, rec=0, externals=0):
+ """ return (collective) Status object for this file. """
+ # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1
+ # 2201 2192 jum test
+ # XXX
+ if externals:
+ raise ValueError("XXX cannot perform status() "
+ "on external items yet")
+ else:
+ #1.2 supports: externals = '--ignore-externals'
+ externals = ''
+ if rec:
+ rec= ''
+ else:
+ rec = '--non-recursive'
+
+ # XXX does not work on all subversion versions
+ #if not externals:
+ # externals = '--ignore-externals'
+
+ if updates:
+ updates = '-u'
+ else:
+ updates = ''
+
+ try:
+ cmd = 'status -v --xml --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ except py.process.cmdexec.Error:
+ cmd = 'status -v --no-ignore %s %s %s' % (
+ updates, rec, externals)
+ out = self._authsvn(cmd)
+ rootstatus = WCStatus(self).fromstring(out, self)
+ else:
+ rootstatus = XMLWCStatus(self).fromstring(out, self)
+ return rootstatus
+
+ def diff(self, rev=None):
+ """ return a diff of the current path against revision rev (defaulting
+ to the last one).
+ """
+ args = []
+ if rev is not None:
+ args.append("-r %d" % rev)
+ out = self._authsvn('diff', args)
+ return out
+
+ def blame(self):
+ """ return a list of tuples of three elements:
+            (revision, committer, line)
+ """
+ out = self._svn('blame')
+ result = []
+ blamelines = out.splitlines()
+ reallines = py.path.svnurl(self.url).readlines()
+ for i, (blameline, line) in enumerate(
+ zip(blamelines, reallines)):
+ m = rex_blame.match(blameline)
+ if not m:
+                raise ValueError("output line %r of svn blame does not match "
+                                 "expected format" % (blameline, ))
+ rev, name, _ = m.groups()
+ result.append((int(rev), name, line))
+ return result
+
+ _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL)
+ def commit(self, msg='', rec=1):
+ """ commit with support for non-recursive commits """
+ # XXX i guess escaping should be done better here?!?
+ cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),)
+ if not rec:
+ cmd += ' -N'
+ out = self._authsvn(cmd)
+ try:
+ del cache.info[self]
+ except KeyError:
+ pass
+ if out:
+ m = self._rex_commit.match(out)
+ return int(m.group(1))
+
+ def propset(self, name, value, *args):
+ """ set property name to value on this path. """
+ d = py.path.local.mkdtemp()
+ try:
+ p = d.join('value')
+ p.write(value)
+ self._svn('propset', name, '--file', str(p), *args)
+ finally:
+ d.remove()
+
+ def propget(self, name):
+ """ get property name on this path. """
+ res = self._svn('propget', name)
+ return res[:-1] # strip trailing newline
+
+ def propdel(self, name):
+ """ delete property name on this path. """
+ res = self._svn('propdel', name)
+ return res[:-1] # strip trailing newline
+
+ def proplist(self, rec=0):
+ """ return a mapping of property names to property values.
+If rec is True, then return a dictionary mapping sub-paths to such mappings.
+"""
+ if rec:
+ res = self._svn('proplist -R')
+ return make_recursive_propdict(self, res)
+ else:
+ res = self._svn('proplist')
+ lines = res.split('\n')
+ lines = [x.strip() for x in lines[1:]]
+ return PropListDict(self, lines)
+
+ def revert(self, rec=0):
+ """ revert the local changes of this path. if rec is True, do so
+recursively. """
+ if rec:
+ result = self._svn('revert -R')
+ else:
+ result = self._svn('revert')
+ return result
+
+ def new(self, **kw):
+ """ create a modified version of this path. A 'rev' argument
+ indicates a new revision.
+ the following keyword arguments modify various path parts:
+
+ http://host.com/repo/path/file.ext
+ |-----------------------| dirname
+ |------| basename
+ |--| purebasename
+ |--| ext
+ """
+ if kw:
+ localpath = self.localpath.new(**kw)
+ else:
+ localpath = self.localpath
+ return self.__class__(localpath, auth=self.auth)
+
+ def join(self, *args, **kwargs):
+ """ return a new Path (with the same revision) which is composed
+ of the self Path followed by 'args' path components.
+ """
+ if not args:
+ return self
+ localpath = self.localpath.join(*args, **kwargs)
+ return self.__class__(localpath, auth=self.auth)
+
+ def info(self, usecache=1):
+ """ return an Info structure with svn-provided information. """
+ info = usecache and cache.info.get(self)
+ if not info:
+ try:
+ output = self._svn('info')
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('Path is not a working copy directory') != -1:
+ raise py.error.ENOENT(self, e.err)
+ elif e.err.find("is not under version control") != -1:
+ raise py.error.ENOENT(self, e.err)
+ raise
+ # XXX SVN 1.3 has output on stderr instead of stdout (while it does
+ # return 0!), so a bit nasty, but we assume no output is output
+ # to stderr...
+ if (output.strip() == '' or
+ output.lower().find('not a versioned resource') != -1):
+ raise py.error.ENOENT(self, output)
+ info = InfoSvnWCCommand(output)
+
+ # Can't reliably compare on Windows without access to win32api
+ if py.std.sys.platform != 'win32':
+ if info.path != self.localpath:
+ raise py.error.ENOENT(self, "not a versioned resource:" +
+ " %s != %s" % (info.path, self.localpath))
+ cache.info[self] = info
+ return info
+
+ def listdir(self, fil=None, sort=None):
+ """ return a sequence of Paths.
+
+ listdir will return either a tuple or a list of paths
+ depending on implementation choices.
+ """
+ if isinstance(fil, str):
+ fil = common.FNMatcher(fil)
+ # XXX unify argument naming with LocalPath.listdir
+ def notsvn(path):
+ return path.basename != '.svn'
+
+ paths = []
+ for localpath in self.localpath.listdir(notsvn):
+ p = self.__class__(localpath, auth=self.auth)
+ if notsvn(p) and (not fil or fil(p)):
+ paths.append(p)
+ self._sortlist(paths, sort)
+ return paths
+
+ def open(self, mode='r'):
+ """ return an opened file with the given mode. """
+ return open(self.strpath, mode)
+
+ def _getbyspec(self, spec):
+ return self.localpath._getbyspec(spec)
+
+ class Checkers(py.path.local.Checkers):
+ def __init__(self, path):
+ self.svnwcpath = path
+ self.path = path.localpath
+ def versioned(self):
+ try:
+ s = self.svnwcpath.info()
+ except (py.error.ENOENT, py.error.EEXIST):
+ return False
+ except py.process.cmdexec.Error:
+ e = sys.exc_info()[1]
+ if e.err.find('is not a working copy')!=-1:
+ return False
+ if e.err.lower().find('not a versioned resource') != -1:
+ return False
+ raise
+ else:
+ return True
+
+ def log(self, rev_start=None, rev_end=1, verbose=False):
+ """ return a list of LogEntry instances for this path.
+rev_start is the starting revision (defaulting to HEAD).
+rev_end is the last revision (defaulting to revision 1, the first one).
+if verbose is True, then the LogEntry instances also know which files changed.
+"""
+ assert self.check() # make it simpler for the pipe
+ rev_start = rev_start is None and "HEAD" or rev_start
+ rev_end = rev_end is None and "HEAD" or rev_end
+ if rev_start == "HEAD" and rev_end == 1:
+ rev_opt = ""
+ else:
+ rev_opt = "-r %s:%s" % (rev_start, rev_end)
+ verbose_opt = verbose and "-v" or ""
+ locale_env = fixlocale()
+ # some blather on stderr
+ auth_opt = self._makeauthoptions()
+ #stdin, stdout, stderr = os.popen3(locale_env +
+ # 'svn log --xml %s %s %s "%s"' % (
+ # rev_opt, verbose_opt, auth_opt,
+ # self.strpath))
+ cmd = locale_env + 'svn log --xml %s %s %s "%s"' % (
+ rev_opt, verbose_opt, auth_opt, self.strpath)
+
+ popen = subprocess.Popen(cmd,
+ stdout=subprocess.PIPE,
+ stderr=subprocess.PIPE,
+ shell=True,
+ )
+ stdout, stderr = popen.communicate()
+ stdout = py.builtin._totext(stdout, sys.getdefaultencoding())
+ minidom,ExpatError = importxml()
+ try:
+ tree = minidom.parseString(stdout)
+ except ExpatError:
+ raise ValueError('no such revision')
+ result = []
+ for logentry in filter(None, tree.firstChild.childNodes):
+ if logentry.nodeType == logentry.ELEMENT_NODE:
+ result.append(LogEntry(logentry))
+ return result
+
+ def size(self):
+ """ Return the size of the file content of the Path. """
+ return self.info().size
+
+ def mtime(self):
+ """ Return the last modification time of the file. """
+ return self.info().mtime
+
+ def __hash__(self):
+ return hash((self.strpath, self.__class__, self.auth))
+
+
+class WCStatus:
+ attrnames = ('modified','added', 'conflict', 'unchanged', 'external',
+ 'deleted', 'prop_modified', 'unknown', 'update_available',
+ 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced'
+ )
+
+ def __init__(self, wcpath, rev=None, modrev=None, author=None):
+ self.wcpath = wcpath
+ self.rev = rev
+ self.modrev = modrev
+ self.author = author
+
+ for name in self.attrnames:
+ setattr(self, name, [])
+
+ def allpath(self, sort=True, **kw):
+ d = {}
+ for name in self.attrnames:
+ if name not in kw or kw[name]:
+ for path in getattr(self, name):
+ d[path] = 1
+ l = d.keys()
+ if sort:
+ l.sort()
+ return l
+
+ # XXX a bit scary to assume there's always 2 spaces between username and
+ # path, however with win32 allowing spaces in user names there doesn't
+ # seem to be a more solid approach :(
+ _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)')
+
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+        """ return a new WCStatus object from 'svn status' output in 'data'
+ """
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ for line in data.split('\n'):
+ if not line.strip():
+ continue
+ #print "processing %r" % line
+ flags, rest = line[:8], line[8:]
+ # first column
+ c0,c1,c2,c3,c4,c5,x6,c7 = flags
+ #if '*' in line:
+ # print "flags", repr(flags), "rest", repr(rest)
+
+ if c0 in '?XI':
+ fn = line.split(None, 1)[1]
+ if c0 == '?':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.unknown.append(wcpath)
+ elif c0 == 'X':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(fn, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ elif c0 == 'I':
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.ignored.append(wcpath)
+
+ continue
+
+ #elif c0 in '~!' or c4 == 'S':
+ # raise NotImplementedError("received flag %r" % c0)
+
+ m = WCStatus._rex_status.match(rest)
+ if not m:
+ if c7 == '*':
+ fn = rest.strip()
+ wcpath = rootwcpath.join(fn, abs=1)
+ rootstatus.update_available.append(wcpath)
+ continue
+ if line.lower().find('against revision:')!=-1:
+ update_rev = int(rest.split(':')[1].strip())
+ continue
+ if line.lower().find('status on external') > -1:
+ # XXX not sure what to do here... perhaps we want to
+ # store some state instead of just continuing, as right
+ # now it makes the top-level external get added twice
+ # (once as external, once as 'normal' unchanged item)
+ # because of the way SVN presents external items
+ continue
+ # keep trying
+ raise ValueError("could not parse line %r" % line)
+ else:
+ rev, modrev, author, fn = m.groups()
+ wcpath = rootwcpath.join(fn, abs=1)
+ #assert wcpath.check()
+ if c0 == 'M':
+ assert wcpath.check(file=1), "didn't expect a directory with changed content here"
+ rootstatus.modified.append(wcpath)
+ elif c0 == 'A' or c3 == '+' :
+ rootstatus.added.append(wcpath)
+ elif c0 == 'D':
+ rootstatus.deleted.append(wcpath)
+ elif c0 == 'C':
+ rootstatus.conflict.append(wcpath)
+ elif c0 == '~':
+ rootstatus.kindmismatch.append(wcpath)
+ elif c0 == '!':
+ rootstatus.incomplete.append(wcpath)
+ elif c0 == 'R':
+ rootstatus.replaced.append(wcpath)
+ elif not c0.strip():
+ rootstatus.unchanged.append(wcpath)
+ else:
+ raise NotImplementedError("received flag %r" % c0)
+
+ if c1 == 'M':
+ rootstatus.prop_modified.append(wcpath)
+ # XXX do we cover all client versions here?
+ if c2 == 'L' or c5 == 'K':
+ rootstatus.locked.append(wcpath)
+ if c7 == '*':
+ rootstatus.update_available.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ if update_rev:
+ rootstatus.update_rev = update_rev
+ continue
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class XMLWCStatus(WCStatus):
+ def fromstring(data, rootwcpath, rev=None, modrev=None, author=None):
+        """ parse 'data' (XML string as output by 'svn st') into a status obj
+ """
+ # XXX for externals, the path is shown twice: once
+ # with external information, and once with full info as if
+ # the item was a normal non-external... the current way of
+ # dealing with this issue is by ignoring it - this does make
+ # externals appear as external items as well as 'normal',
+ # unchanged ones in the status object so this is far from ideal
+ rootstatus = WCStatus(rootwcpath, rev, modrev, author)
+ update_rev = None
+ minidom, ExpatError = importxml()
+ try:
+ doc = minidom.parseString(data)
+ except ExpatError:
+ e = sys.exc_info()[1]
+ raise ValueError(str(e))
+ urevels = doc.getElementsByTagName('against')
+ if urevels:
+ rootstatus.update_rev = urevels[-1].getAttribute('revision')
+ for entryel in doc.getElementsByTagName('entry'):
+ path = entryel.getAttribute('path')
+ statusel = entryel.getElementsByTagName('wc-status')[0]
+ itemstatus = statusel.getAttribute('item')
+
+ if itemstatus == 'unversioned':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.unknown.append(wcpath)
+ continue
+ elif itemstatus == 'external':
+ wcpath = rootwcpath.__class__(
+ rootwcpath.localpath.join(path, abs=1),
+ auth=rootwcpath.auth)
+ rootstatus.external.append(wcpath)
+ continue
+ elif itemstatus == 'ignored':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.ignored.append(wcpath)
+ continue
+ elif itemstatus == 'incomplete':
+ wcpath = rootwcpath.join(path, abs=1)
+ rootstatus.incomplete.append(wcpath)
+ continue
+
+ rev = statusel.getAttribute('revision')
+ if itemstatus == 'added' or itemstatus == 'none':
+ rev = '0'
+ modrev = '?'
+ author = '?'
+ date = ''
+ elif itemstatus == "replaced":
+ pass
+ else:
+ #print entryel.toxml()
+ commitel = entryel.getElementsByTagName('commit')[0]
+ if commitel:
+ modrev = commitel.getAttribute('revision')
+ author = ''
+ author_els = commitel.getElementsByTagName('author')
+ if author_els:
+ for c in author_els[0].childNodes:
+ author += c.nodeValue
+ date = ''
+ for c in commitel.getElementsByTagName('date')[0]\
+ .childNodes:
+ date += c.nodeValue
+
+ wcpath = rootwcpath.join(path, abs=1)
+
+ assert itemstatus != 'modified' or wcpath.check(file=1), (
+                'didn\'t expect a directory with changed content here')
+
+ itemattrname = {
+ 'normal': 'unchanged',
+ 'unversioned': 'unknown',
+ 'conflicted': 'conflict',
+ 'none': 'added',
+ }.get(itemstatus, itemstatus)
+
+ attr = getattr(rootstatus, itemattrname)
+ attr.append(wcpath)
+
+ propsstatus = statusel.getAttribute('props')
+ if propsstatus not in ('none', 'normal'):
+ rootstatus.prop_modified.append(wcpath)
+
+ if wcpath == rootwcpath:
+ rootstatus.rev = rev
+ rootstatus.modrev = modrev
+ rootstatus.author = author
+ rootstatus.date = date
+
+ # handle repos-status element (remote info)
+ rstatusels = entryel.getElementsByTagName('repos-status')
+ if rstatusels:
+ rstatusel = rstatusels[0]
+ ritemstatus = rstatusel.getAttribute('item')
+ if ritemstatus in ('added', 'modified'):
+ rootstatus.update_available.append(wcpath)
+
+ lockels = entryel.getElementsByTagName('lock')
+ if len(lockels):
+ rootstatus.locked.append(wcpath)
+
+ return rootstatus
+ fromstring = staticmethod(fromstring)
+
+class InfoSvnWCCommand:
+ def __init__(self, output):
+ # Path: test
+ # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test
+ # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada
+ # Revision: 2151
+ # Node Kind: directory
+ # Schedule: normal
+ # Last Changed Author: hpk
+ # Last Changed Rev: 2100
+ # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003)
+
+ d = {}
+ for line in output.split('\n'):
+ if not line.strip():
+ continue
+ key, value = line.split(':', 1)
+ key = key.lower().replace(' ', '')
+ value = value.strip()
+ d[key] = value
+ try:
+ self.url = d['url']
+ except KeyError:
+ raise ValueError("Not a versioned resource")
+ #raise ValueError, "Not a versioned resource %r" % path
+ self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind']
+ try:
+ self.rev = int(d['revision'])
+ except KeyError:
+ self.rev = None
+
+ self.path = py.path.local(d['path'])
+ self.size = self.path.size()
+ if 'lastchangedrev' in d:
+ self.created_rev = int(d['lastchangedrev'])
+ if 'lastchangedauthor' in d:
+ self.last_author = d['lastchangedauthor']
+ if 'lastchangeddate' in d:
+ self.mtime = parse_wcinfotime(d['lastchangeddate'])
+ self.time = self.mtime * 1000000
+
+ def __eq__(self, other):
+ return self.__dict__ == other.__dict__
+
+def parse_wcinfotime(timestr):
+ """ Returns seconds since epoch, UTC. """
+ # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003)
+ m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr)
+ if not m:
+ raise ValueError("timestring %r does not match" % timestr)
+ timestr, timezone = m.groups()
+ # do not handle timezone specially, return value should be UTC
+ parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S")
+ return calendar.timegm(parsedtime)
+
+def make_recursive_propdict(wcroot,
+ output,
+ rex = re.compile("Properties on '(.*)':")):
+ """ Return a dictionary of path->PropListDict mappings. """
+ lines = [x for x in output.split('\n') if x]
+ pdict = {}
+ while lines:
+ line = lines.pop(0)
+ m = rex.match(line)
+ if not m:
+ raise ValueError("could not parse propget-line: %r" % line)
+ path = m.groups()[0]
+ wcpath = wcroot.join(path, abs=1)
+ propnames = []
+ while lines and lines[0].startswith(' '):
+ propname = lines.pop(0).strip()
+ propnames.append(propname)
+ assert propnames, "must have found properties!"
+ pdict[wcpath] = PropListDict(wcpath, propnames)
+ return pdict
+
+
+def importxml(cache=[]):
+ if cache:
+ return cache
+ from xml.dom import minidom
+ from xml.parsers.expat import ExpatError
+ cache.extend([minidom, ExpatError])
+ return cache
+
+class LogEntry:
+ def __init__(self, logentry):
+ self.rev = int(logentry.getAttribute('revision'))
+ for lpart in filter(None, logentry.childNodes):
+ if lpart.nodeType == lpart.ELEMENT_NODE:
+ if lpart.nodeName == 'author':
+ self.author = lpart.firstChild.nodeValue
+ elif lpart.nodeName == 'msg':
+ if lpart.firstChild:
+ self.msg = lpart.firstChild.nodeValue
+ else:
+ self.msg = ''
+ elif lpart.nodeName == 'date':
+ #2003-07-29T20:05:11.598637Z
+ timestr = lpart.firstChild.nodeValue
+ self.date = parse_apr_time(timestr)
+ elif lpart.nodeName == 'paths':
+ self.strpaths = []
+ for ppart in filter(None, lpart.childNodes):
+ if ppart.nodeType == ppart.ELEMENT_NODE:
+ self.strpaths.append(PathEntry(ppart))
+ def __repr__(self):
+ return '<Logentry rev=%d author=%s date=%s>' % (
+ self.rev, self.author, self.date)
+
+
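
The working-copy class above shells out to the svn client; a minimal usage sketch (not part of the patch), assuming the vendored py package is importable and '/tmp/checkout' is a hypothetical, already checked-out working copy:

    import py

    wc = py.path.svnwc('/tmp/checkout')    # hypothetical working-copy location
    st = wc.status(rec=1)                  # parsed into WCStatus/XMLWCStatus above
    for p in st.modified:
        print("locally modified:", p)
    for entry in wc.log(verbose=True):     # LogEntry objects parsed from 'svn log --xml'
        print(entry.rev, entry.author, entry.msg)
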
diff --git a/lib/spack/external/py/_process/__init__.py b/lib/spack/external/py/_process/__init__.py
new file mode 100644
index 0000000000..86c714ad1a
--- /dev/null
+++ b/lib/spack/external/py/_process/__init__.py
@@ -0,0 +1 @@
+""" high-level sub-process handling """
diff --git a/lib/spack/external/py/_process/cmdexec.py b/lib/spack/external/py/_process/cmdexec.py
new file mode 100644
index 0000000000..f83a249402
--- /dev/null
+++ b/lib/spack/external/py/_process/cmdexec.py
@@ -0,0 +1,49 @@
+import sys
+import subprocess
+import py
+from subprocess import Popen, PIPE
+
+def cmdexec(cmd):
+ """ return unicode output of executing 'cmd' in a separate process.
+
+    raise a cmdexec.Error exception if the command failed.
+ the exception will provide an 'err' attribute containing
+ the error-output from the command.
+    if the subprocess module does not provide proper encoding/unicode strings,
+    sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
+ """
+ process = subprocess.Popen(cmd, shell=True,
+ universal_newlines=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ out, err = process.communicate()
+ if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not
+ try:
+ default_encoding = sys.getdefaultencoding() # jython may not have it
+ except AttributeError:
+ default_encoding = sys.stdout.encoding or 'UTF-8'
+ out = unicode(out, process.stdout.encoding or default_encoding)
+ err = unicode(err, process.stderr.encoding or default_encoding)
+ status = process.poll()
+ if status:
+ raise ExecutionFailed(status, status, cmd, out, err)
+ return out
+
+class ExecutionFailed(py.error.Error):
+ def __init__(self, status, systemstatus, cmd, out, err):
+ Exception.__init__(self)
+ self.status = status
+ self.systemstatus = systemstatus
+ self.cmd = cmd
+ self.err = err
+ self.out = out
+
+ def __str__(self):
+ return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err)
+
+# export the exception under the name 'py.process.cmdexec.Error'
+cmdexec.Error = ExecutionFailed
+try:
+ ExecutionFailed.__module__ = 'py.process.cmdexec'
+ ExecutionFailed.__name__ = 'Error'
+except (AttributeError, TypeError):
+ pass
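
A short sketch of how the helper above is meant to be called (not part of the patch); 'echo hello' is just a stand-in command, and the error type is reached through the cmdexec.Error alias set at the bottom of the module:

    import py

    try:
        out = py.process.cmdexec('echo hello')   # unicode stdout of the command
    except py.process.cmdexec.Error as err:      # ExecutionFailed, re-exported as Error
        print("command failed with status", err.status)
        print(err.err)                           # captured stderr
    else:
        print(out.strip())
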
diff --git a/lib/spack/external/py/_process/forkedfunc.py b/lib/spack/external/py/_process/forkedfunc.py
new file mode 100644
index 0000000000..1c28530688
--- /dev/null
+++ b/lib/spack/external/py/_process/forkedfunc.py
@@ -0,0 +1,120 @@
+
+"""
+ ForkedFunc provides a way to run a function in a forked process
+ and get at its return value, stdout and stderr output as well
+    as signals and exit statuses.
+"""
+
+import py
+import os
+import sys
+import marshal
+
+
+def get_unbuffered_io(fd, filename):
+ f = open(str(filename), "w")
+ if fd != f.fileno():
+ os.dup2(f.fileno(), fd)
+ class AutoFlush:
+ def write(self, data):
+ f.write(data)
+ f.flush()
+ def __getattr__(self, name):
+ return getattr(f, name)
+ return AutoFlush()
+
+
+class ForkedFunc:
+ EXITSTATUS_EXCEPTION = 3
+
+
+ def __init__(self, fun, args=None, kwargs=None, nice_level=0,
+ child_on_start=None, child_on_exit=None):
+ if args is None:
+ args = []
+ if kwargs is None:
+ kwargs = {}
+ self.fun = fun
+ self.args = args
+ self.kwargs = kwargs
+ self.tempdir = tempdir = py.path.local.mkdtemp()
+ self.RETVAL = tempdir.ensure('retval')
+ self.STDOUT = tempdir.ensure('stdout')
+ self.STDERR = tempdir.ensure('stderr')
+
+ pid = os.fork()
+ if pid: # in parent process
+ self.pid = pid
+ else: # in child process
+ self.pid = None
+ self._child(nice_level, child_on_start, child_on_exit)
+
+ def _child(self, nice_level, child_on_start, child_on_exit):
+ # right now we need to call a function, but first we need to
+ # map all IO that might happen
+ sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT)
+ sys.stderr = stderr = get_unbuffered_io(2, self.STDERR)
+ retvalf = self.RETVAL.open("wb")
+ EXITSTATUS = 0
+ try:
+ if nice_level:
+ os.nice(nice_level)
+ try:
+ if child_on_start is not None:
+ child_on_start()
+ retval = self.fun(*self.args, **self.kwargs)
+ retvalf.write(marshal.dumps(retval))
+ if child_on_exit is not None:
+ child_on_exit()
+ except:
+ excinfo = py.code.ExceptionInfo()
+ stderr.write(str(excinfo._getreprcrash()))
+ EXITSTATUS = self.EXITSTATUS_EXCEPTION
+ finally:
+ stdout.close()
+ stderr.close()
+ retvalf.close()
+ os.close(1)
+ os.close(2)
+ os._exit(EXITSTATUS)
+
+ def waitfinish(self, waiter=os.waitpid):
+ pid, systemstatus = waiter(self.pid, 0)
+ if systemstatus:
+ if os.WIFSIGNALED(systemstatus):
+ exitstatus = os.WTERMSIG(systemstatus) + 128
+ else:
+ exitstatus = os.WEXITSTATUS(systemstatus)
+ else:
+ exitstatus = 0
+ signal = systemstatus & 0x7f
+ if not exitstatus and not signal:
+ retval = self.RETVAL.open('rb')
+ try:
+ retval_data = retval.read()
+ finally:
+ retval.close()
+ retval = marshal.loads(retval_data)
+ else:
+ retval = None
+ stdout = self.STDOUT.read()
+ stderr = self.STDERR.read()
+ self._removetemp()
+ return Result(exitstatus, signal, retval, stdout, stderr)
+
+ def _removetemp(self):
+ if self.tempdir.check():
+ self.tempdir.remove()
+
+ def __del__(self):
+ if self.pid is not None: # only clean up in main process
+ self._removetemp()
+
+
+class Result(object):
+ def __init__(self, exitstatus, signal, retval, stdout, stderr):
+ self.exitstatus = exitstatus
+ self.signal = signal
+ self.retval = retval
+ self.out = stdout
+ self.err = stderr
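
A minimal sketch of the ForkedFunc API added above (POSIX-only, since it relies on os.fork); compute() is a made-up example function, and the import path mirrors the vendored module location:

    from py._process.forkedfunc import ForkedFunc

    def compute():
        print("hello from the child")
        return 6 * 7

    ff = ForkedFunc(compute)        # forks immediately; the parent keeps the child's pid
    result = ff.waitfinish()        # Result(exitstatus, signal, retval, stdout, stderr)
    assert result.retval == 42
    assert "hello from the child" in result.out
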
diff --git a/lib/spack/external/py/_process/killproc.py b/lib/spack/external/py/_process/killproc.py
new file mode 100644
index 0000000000..18e8310b5f
--- /dev/null
+++ b/lib/spack/external/py/_process/killproc.py
@@ -0,0 +1,23 @@
+import py
+import os, sys
+
+if sys.platform == "win32" or getattr(os, '_name', '') == 'nt':
+ try:
+ import ctypes
+ except ImportError:
+ def dokill(pid):
+ py.process.cmdexec("taskkill /F /PID %d" %(pid,))
+ else:
+ def dokill(pid):
+ PROCESS_TERMINATE = 1
+ handle = ctypes.windll.kernel32.OpenProcess(
+ PROCESS_TERMINATE, False, pid)
+ ctypes.windll.kernel32.TerminateProcess(handle, -1)
+ ctypes.windll.kernel32.CloseHandle(handle)
+else:
+ def dokill(pid):
+ os.kill(pid, 15)
+
+def kill(pid):
+ """ kill process by id. """
+ dokill(pid)
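
For completeness, a sketch of the portable kill() helper, assuming the py package exposes it as py.process.kill like the other _process helpers; the sleep child is hypothetical:

    import subprocess
    import py

    proc = subprocess.Popen(['sleep', '60'])   # hypothetical long-running child
    py.process.kill(proc.pid)                  # SIGTERM on POSIX, TerminateProcess/taskkill on Windows
    proc.wait()
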
diff --git a/lib/spack/external/py/_std.py b/lib/spack/external/py/_std.py
new file mode 100644
index 0000000000..97a9853323
--- /dev/null
+++ b/lib/spack/external/py/_std.py
@@ -0,0 +1,18 @@
+import sys
+
+class Std(object):
+    """ makes top-level python modules available as attributes,
+ importing them on first access.
+ """
+
+ def __init__(self):
+ self.__dict__ = sys.modules
+
+ def __getattr__(self, name):
+ try:
+ m = __import__(name)
+ except ImportError:
+ raise AttributeError("py.std: could not import %s" % name)
+ return m
+
+std = Std()
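
The Std helper backs the py.std shortcut already used by the vendored svnwc code above (py.std.sys.platform); a tiny sketch:

    import py

    # attribute access triggers a normal import on first use
    print(py.std.textwrap.fill("top-level modules, imported lazily on attribute access"))
    assert py.std.os is __import__('os')
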
diff --git a/lib/spack/external/py/_xmlgen.py b/lib/spack/external/py/_xmlgen.py
new file mode 100644
index 0000000000..1c83545884
--- /dev/null
+++ b/lib/spack/external/py/_xmlgen.py
@@ -0,0 +1,255 @@
+"""
+module for generating and serializing xml and html structures
+by using simple python objects.
+
+(c) holger krekel, holger at merlinux eu. 2009
+"""
+import sys, re
+
+if sys.version_info >= (3,0):
+ def u(s):
+ return s
+ def unicode(x, errors=None):
+ if hasattr(x, '__unicode__'):
+ return x.__unicode__()
+ return str(x)
+else:
+ def u(s):
+ return unicode(s)
+ unicode = unicode
+
+
+class NamespaceMetaclass(type):
+ def __getattr__(self, name):
+ if name[:1] == '_':
+ raise AttributeError(name)
+ if self == Namespace:
+ raise ValueError("Namespace class is abstract")
+ tagspec = self.__tagspec__
+ if tagspec is not None and name not in tagspec:
+ raise AttributeError(name)
+ classattr = {}
+ if self.__stickyname__:
+ classattr['xmlname'] = name
+ cls = type(name, (self.__tagclass__,), classattr)
+ setattr(self, name, cls)
+ return cls
+
+class Tag(list):
+ class Attr(object):
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+ def __init__(self, *args, **kwargs):
+ super(Tag, self).__init__(args)
+ self.attr = self.Attr(**kwargs)
+
+ def __unicode__(self):
+ return self.unicode(indent=0)
+ __str__ = __unicode__
+
+ def unicode(self, indent=2):
+ l = []
+ SimpleUnicodeVisitor(l.append, indent).visit(self)
+ return u("").join(l)
+
+ def __repr__(self):
+ name = self.__class__.__name__
+ return "<%r tag object %d>" % (name, id(self))
+
+Namespace = NamespaceMetaclass('Namespace', (object, ), {
+ '__tagspec__': None,
+ '__tagclass__': Tag,
+ '__stickyname__': False,
+})
+
+class HtmlTag(Tag):
+ def unicode(self, indent=2):
+ l = []
+ HtmlVisitor(l.append, indent, shortempty=False).visit(self)
+ return u("").join(l)
+
+# exported plain html namespace
+class html(Namespace):
+ __tagclass__ = HtmlTag
+ __stickyname__ = True
+ __tagspec__ = dict([(x,1) for x in (
+ 'a,abbr,acronym,address,applet,area,article,aside,audio,b,'
+ 'base,basefont,bdi,bdo,big,blink,blockquote,body,br,button,'
+ 'canvas,caption,center,cite,code,col,colgroup,command,comment,'
+ 'datalist,dd,del,details,dfn,dir,div,dl,dt,em,embed,'
+ 'fieldset,figcaption,figure,footer,font,form,frame,frameset,h1,'
+ 'h2,h3,h4,h5,h6,head,header,hgroup,hr,html,i,iframe,img,input,'
+ 'ins,isindex,kbd,keygen,label,legend,li,link,listing,map,mark,'
+ 'marquee,menu,meta,meter,multicol,nav,nobr,noembed,noframes,'
+ 'noscript,object,ol,optgroup,option,output,p,param,pre,progress,'
+ 'q,rp,rt,ruby,s,samp,script,section,select,small,source,span,'
+ 'strike,strong,style,sub,summary,sup,table,tbody,td,textarea,'
+ 'tfoot,th,thead,time,title,tr,track,tt,u,ul,xmp,var,video,wbr'
+ ).split(',') if x])
+
+ class Style(object):
+ def __init__(self, **kw):
+ for x, y in kw.items():
+ x = x.replace('_', '-')
+ setattr(self, x, y)
+
+
+class raw(object):
+ """just a box that can contain a unicode string that will be
+ included directly in the output"""
+ def __init__(self, uniobj):
+ self.uniobj = uniobj
+
+class SimpleUnicodeVisitor(object):
+ """ recursive visitor to write unicode. """
+ def __init__(self, write, indent=0, curindent=0, shortempty=True):
+ self.write = write
+ self.cache = {}
+ self.visited = {} # for detection of recursion
+ self.indent = indent
+ self.curindent = curindent
+ self.parents = []
+ self.shortempty = shortempty # short empty tags or not
+
+ def visit(self, node):
+ """ dispatcher on node's class/bases name. """
+ cls = node.__class__
+ try:
+ visitmethod = self.cache[cls]
+ except KeyError:
+ for subclass in cls.__mro__:
+ visitmethod = getattr(self, subclass.__name__, None)
+ if visitmethod is not None:
+ break
+ else:
+ visitmethod = self.__object
+ self.cache[cls] = visitmethod
+ visitmethod(node)
+
+ # the default fallback handler is marked private
+ # to avoid clashes with the tag name object
+ def __object(self, obj):
+ #self.write(obj)
+ self.write(escape(unicode(obj)))
+
+ def raw(self, obj):
+ self.write(obj.uniobj)
+
+ def list(self, obj):
+ assert id(obj) not in self.visited
+ self.visited[id(obj)] = 1
+ for elem in obj:
+ self.visit(elem)
+
+ def Tag(self, tag):
+ assert id(tag) not in self.visited
+ try:
+ tag.parent = self.parents[-1]
+ except IndexError:
+ tag.parent = None
+ self.visited[id(tag)] = 1
+ tagname = getattr(tag, 'xmlname', tag.__class__.__name__)
+ if self.curindent and not self._isinline(tagname):
+ self.write("\n" + u(' ') * self.curindent)
+ if tag:
+ self.curindent += self.indent
+ self.write(u('<%s%s>') % (tagname, self.attributes(tag)))
+ self.parents.append(tag)
+ for x in tag:
+ self.visit(x)
+ self.parents.pop()
+ self.write(u('</%s>') % tagname)
+ self.curindent -= self.indent
+ else:
+ nameattr = tagname+self.attributes(tag)
+ if self._issingleton(tagname):
+ self.write(u('<%s/>') % (nameattr,))
+ else:
+ self.write(u('<%s></%s>') % (nameattr, tagname))
+
+ def attributes(self, tag):
+ # serialize attributes
+ attrlist = dir(tag.attr)
+ attrlist.sort()
+ l = []
+ for name in attrlist:
+ res = self.repr_attribute(tag.attr, name)
+ if res is not None:
+ l.append(res)
+ l.extend(self.getstyle(tag))
+ return u("").join(l)
+
+ def repr_attribute(self, attrs, name):
+ if name[:2] != '__':
+ value = getattr(attrs, name)
+ if name.endswith('_'):
+ name = name[:-1]
+ if isinstance(value, raw):
+ insert = value.uniobj
+ else:
+ insert = escape(unicode(value))
+ return ' %s="%s"' % (name, insert)
+
+ def getstyle(self, tag):
+ """ return attribute list suitable for styling. """
+ try:
+ styledict = tag.style.__dict__
+ except AttributeError:
+ return []
+ else:
+ stylelist = [x+': ' + y for x,y in styledict.items()]
+ return [u(' style="%s"') % u('; ').join(stylelist)]
+
+ def _issingleton(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return self.shortempty
+
+ def _isinline(self, tagname):
+ """can (and will) be overridden in subclasses"""
+ return False
+
+class HtmlVisitor(SimpleUnicodeVisitor):
+
+ single = dict([(x, 1) for x in
+ ('br,img,area,param,col,hr,meta,link,base,'
+ 'input,frame').split(',')])
+ inline = dict([(x, 1) for x in
+ ('a abbr acronym b basefont bdo big br cite code dfn em font '
+ 'i img input kbd label q s samp select small span strike '
+ 'strong sub sup textarea tt u var'.split(' '))])
+
+ def repr_attribute(self, attrs, name):
+ if name == 'class_':
+ value = getattr(attrs, name)
+ if value is None:
+ return
+ return super(HtmlVisitor, self).repr_attribute(attrs, name)
+
+ def _issingleton(self, tagname):
+ return tagname in self.single
+
+ def _isinline(self, tagname):
+ return tagname in self.inline
+
+
+class _escape:
+ def __init__(self):
+ self.escape = {
+ u('"') : u('&quot;'), u('<') : u('&lt;'), u('>') : u('&gt;'),
+ u('&') : u('&amp;'), u("'") : u('&apos;'),
+ }
+ self.charef_rex = re.compile(u("|").join(self.escape.keys()))
+
+ def _replacer(self, match):
+ return self.escape[match.group(0)]
+
+ def __call__(self, ustring):
+ """ xml-escape the given unicode string. """
+ try:
+ ustring = unicode(ustring)
+ except UnicodeDecodeError:
+ ustring = unicode(ustring, 'utf-8', errors='replace')
+ return self.charef_rex.sub(self._replacer, ustring)
+
+escape = _escape()
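
A short sketch of the html namespace defined above (not part of the patch); class_ uses the trailing-underscore convention handled by repr_attribute, and raw() inserts markup verbatim:

    from py._xmlgen import html, raw

    page = html.div(
        html.p("hello ", html.strong("world")),
        raw("<!-- inserted verbatim -->"),
        class_="greeting",
    )
    print(page.unicode(indent=2))
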
diff --git a/lib/spack/external/py/test.py b/lib/spack/external/py/test.py
new file mode 100644
index 0000000000..aa5beb1789
--- /dev/null
+++ b/lib/spack/external/py/test.py
@@ -0,0 +1,10 @@
+import sys
+if __name__ == '__main__':
+ import pytest
+ sys.exit(pytest.main())
+else:
+ import sys, pytest
+ sys.modules['py.test'] = pytest
+
+# for more API entry points see the 'tests' definition
+# in __init__.py
diff --git a/lib/spack/external/pyqver2.py b/lib/spack/external/pyqver2.py
index 4690239748..571e005524 100755
--- a/lib/spack/external/pyqver2.py
+++ b/lib/spack/external/pyqver2.py
@@ -57,7 +57,11 @@ StandardModules = {
"hmac": (2, 2),
"hotshot": (2, 2),
"HTMLParser": (2, 2),
- "importlib": (2, 7),
+# skip importlib until we can conditionally skip for pytest.
+# pytest tries to import this and catches the exception, but
+# the test will still fail.
+# TODO: can we exclude this with a comment like '# flake8: noqa'?
+# "importlib": (2, 7),
"inspect": (2, 1),
"io": (2, 6),
"itertools": (2, 3),
diff --git a/lib/spack/external/pytest.py b/lib/spack/external/pytest.py
new file mode 100644
index 0000000000..e376e417e8
--- /dev/null
+++ b/lib/spack/external/pytest.py
@@ -0,0 +1,28 @@
+# PYTHON_ARGCOMPLETE_OK
+"""
+pytest: unit and functional testing with Python.
+"""
+__all__ = [
+ 'main',
+ 'UsageError',
+ 'cmdline',
+ 'hookspec',
+ 'hookimpl',
+ '__version__',
+]
+
+if __name__ == '__main__': # if run as a script or by 'python -m pytest'
+ # we trigger the below "else" condition by the following import
+ import pytest
+ raise SystemExit(pytest.main())
+
+# else we are imported
+
+from _pytest.config import (
+ main, UsageError, _preloadplugins, cmdline,
+ hookspec, hookimpl
+)
+from _pytest import __version__
+
+_preloadplugins() # to populate pytest.* namespace so help(pytest) works
+
diff --git a/lib/spack/llnl/util/filesystem.py b/lib/spack/llnl/util/filesystem.py
index 6661a80f27..31e09f2fe6 100644
--- a/lib/spack/llnl/util/filesystem.py
+++ b/lib/spack/llnl/util/filesystem.py
@@ -22,33 +22,55 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-__all__ = ['set_install_permissions', 'install', 'install_tree', 'traverse_tree',
- 'expand_user', 'working_dir', 'touch', 'touchp', 'mkdirp',
- 'force_remove', 'join_path', 'ancestor', 'can_access', 'filter_file',
- 'FileFilter', 'change_sed_delimiter', 'is_exe', 'force_symlink',
- 'set_executable', 'copy_mode', 'unset_executable_mode',
- 'remove_dead_links', 'remove_linked_tree', 'find_library_path',
- 'fix_darwin_install_name']
-
-import os
+import collections
+import errno
+import fileinput
import glob
-import sys
+import numbers
+import os
import re
import shutil
import stat
-import errno
-import getpass
-from contextlib import contextmanager, closing
-from tempfile import NamedTemporaryFile
import subprocess
+import sys
+from contextlib import contextmanager
import llnl.util.tty as tty
-from spack.util.compression import ALLOWED_ARCHIVE_TYPES
+from llnl.util.lang import dedupe
+
+__all__ = [
+ 'FileFilter',
+ 'LibraryList',
+ 'ancestor',
+ 'can_access',
+ 'change_sed_delimiter',
+ 'copy_mode',
+ 'filter_file',
+ 'find_libraries',
+ 'fix_darwin_install_name',
+ 'force_remove',
+ 'force_symlink',
+ 'install',
+ 'install_tree',
+ 'is_exe',
+ 'join_path',
+ 'mkdirp',
+ 'remove_dead_links',
+ 'remove_if_dead_link',
+ 'remove_linked_tree',
+ 'set_executable',
+ 'set_install_permissions',
+ 'touch',
+ 'touchp',
+ 'traverse_tree',
+ 'unset_executable_mode',
+ 'working_dir']
+
def filter_file(regex, repl, *filenames, **kwargs):
"""Like sed, but uses python regular expressions.
- Filters every line of file through regex and replaces the file
+ Filters every line of each file through regex and replaces the file
with a filtered version. Preserves mode of filtered files.
As with re.sub, ``repl`` can be either a string or a callable.
@@ -59,7 +81,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
Keyword Options:
string[=False] If True, treat regex as a plain string.
- backup[=True] Make a backup files suffixed with ~
+ backup[=True] Make backup file(s) suffixed with ~
ignore_absent[=False] Ignore any files that don't exist.
"""
string = kwargs.get('string', False)
@@ -69,6 +91,7 @@ def filter_file(regex, repl, *filenames, **kwargs):
# Allow strings to use \1, \2, etc. for replacement, like sed
if not callable(repl):
unescaped = repl.replace(r'\\', '\\')
+
def replace_groups_with_groupid(m):
def groupid_to_group(x):
return m.group(int(x.group(1)))
@@ -79,30 +102,32 @@ def filter_file(regex, repl, *filenames, **kwargs):
regex = re.escape(regex)
for filename in filenames:
- backup = filename + "~"
+ backup_filename = filename + "~"
if ignore_absent and not os.path.exists(filename):
continue
- shutil.copy(filename, backup)
+ # Create backup file. Don't overwrite an existing backup
+ # file in case this file is being filtered multiple times.
+ if not os.path.exists(backup_filename):
+ shutil.copy(filename, backup_filename)
+
try:
- with closing(open(backup)) as infile:
- with closing(open(filename, 'w')) as outfile:
- for line in infile:
- foo = re.sub(regex, repl, line)
- outfile.write(foo)
+ for line in fileinput.input(filename, inplace=True):
+ print(re.sub(regex, repl, line.rstrip('\n')))
except:
# clean up the original file on failure.
- shutil.move(backup, filename)
+ shutil.move(backup_filename, filename)
raise
finally:
if not backup:
- shutil.rmtree(backup, ignore_errors=True)
+ os.remove(backup_filename)
class FileFilter(object):
"""Convenience class for calling filter_file a lot."""
+
def __init__(self, *filenames):
self.filenames = filenames
@@ -113,7 +138,7 @@ class FileFilter(object):
def change_sed_delimiter(old_delim, new_delim, *filenames):
"""Find all sed search/replace commands and change the delimiter.
e.g., if the file contains seds that look like 's///', you can
- call change_sed_delimeter('/', '@', file) to change the
+ call change_sed_delimiter('/', '@', file) to change the
delimiter to '@'.
NOTE that this routine will fail if the delimiter is ' or ".
@@ -157,9 +182,12 @@ def set_install_permissions(path):
def copy_mode(src, dest):
src_mode = os.stat(src).st_mode
dest_mode = os.stat(dest).st_mode
- if src_mode & stat.S_IXUSR: dest_mode |= stat.S_IXUSR
- if src_mode & stat.S_IXGRP: dest_mode |= stat.S_IXGRP
- if src_mode & stat.S_IXOTH: dest_mode |= stat.S_IXOTH
+ if src_mode & stat.S_IXUSR:
+ dest_mode |= stat.S_IXUSR
+ if src_mode & stat.S_IXGRP:
+ dest_mode |= stat.S_IXGRP
+ if src_mode & stat.S_IXOTH:
+ dest_mode |= stat.S_IXOTH
os.chmod(dest, dest_mode)
@@ -175,7 +203,7 @@ def install(src, dest):
"""Manually install a file to a particular location."""
tty.debug("Installing %s to %s" % (src, dest))
- # Expand dsst to its eventual full path if it is a directory.
+ # Expand dest to its eventual full path if it is a directory.
if os.path.isdir(dest):
dest = join_path(dest, os.path.basename(src))
@@ -185,7 +213,7 @@ def install(src, dest):
def install_tree(src, dest, **kwargs):
- """Manually install a file to a particular location."""
+ """Manually install a directory tree to a particular location."""
tty.debug("Installing %s to %s" % (src, dest))
shutil.copytree(src, dest, **kwargs)
@@ -199,23 +227,13 @@ def is_exe(path):
return os.path.isfile(path) and os.access(path, os.X_OK)
-def expand_user(path):
- """Find instances of '%u' in a path and replace with the current user's
- username."""
- username = getpass.getuser()
- if not username and '%u' in path:
- tty.die("Couldn't get username to complete path '%s'" % path)
-
- return path.replace('%u', username)
-
-
def mkdirp(*paths):
"""Creates a directory, as well as parent directories if needed."""
for path in paths:
if not os.path.exists(path):
os.makedirs(path)
elif not os.path.isdir(path):
- raise OSError(errno.EEXIST, "File alredy exists", path)
+ raise OSError(errno.EEXIST, "File already exists", path)
def force_remove(*paths):
@@ -224,9 +242,10 @@ def force_remove(*paths):
for path in paths:
try:
os.remove(path)
- except OSError, e:
+ except OSError:
pass
+
@contextmanager
def working_dir(dirname, **kwargs):
if kwargs.get('create', False):
@@ -240,7 +259,7 @@ def working_dir(dirname, **kwargs):
def touch(path):
"""Creates an empty file at the specified path."""
- with open(path, 'a') as file:
+ with open(path, 'a'):
os.utime(path, None)
@@ -253,7 +272,7 @@ def touchp(path):
def force_symlink(src, dest):
try:
os.symlink(src, dest)
- except OSError as e:
+ except OSError:
os.remove(dest)
os.symlink(src, dest)
@@ -275,7 +294,7 @@ def ancestor(dir, n=1):
def can_access(file_name):
"""True if we have read/write access to the file."""
- return os.access(file_name, os.R_OK|os.W_OK)
+ return os.access(file_name, os.R_OK | os.W_OK)
def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
@@ -304,7 +323,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
Optional args:
- order=[pre|post] -- Whether to do pre- or post-order traveral.
+ order=[pre|post] -- Whether to do pre- or post-order traversal.
ignore=<predicate> -- Predicate indicating which files to ignore.
@@ -318,7 +337,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
follow_links = kwargs.get('follow_link', False)
# Yield in pre or post order?
- order = kwargs.get('order', 'pre')
+ order = kwargs.get('order', 'pre')
if order not in ('pre', 'post'):
raise ValueError("Order must be 'pre' or 'post'.")
@@ -330,7 +349,7 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
return
source_path = os.path.join(source_root, rel_path)
- dest_path = os.path.join(dest_root, rel_path)
+ dest_path = os.path.join(dest_root, rel_path)
# preorder yields directories before children
if order == 'pre':
@@ -338,18 +357,20 @@ def traverse_tree(source_root, dest_root, rel_path='', **kwargs):
for f in os.listdir(source_path):
source_child = os.path.join(source_path, f)
- dest_child = os.path.join(dest_path, f)
- rel_child = os.path.join(rel_path, f)
+ dest_child = os.path.join(dest_path, f)
+ rel_child = os.path.join(rel_path, f)
# Treat as a directory
if os.path.isdir(source_child) and (
- follow_links or not os.path.islink(source_child)):
+ follow_links or not os.path.islink(source_child)):
# When follow_nonexisting isn't set, don't descend into dirs
# in source that do not exist in dest
if follow_nonexisting or os.path.exists(dest_child):
- tuples = traverse_tree(source_root, dest_root, rel_child, **kwargs)
- for t in tuples: yield t
+ tuples = traverse_tree(
+ source_root, dest_root, rel_child, **kwargs)
+ for t in tuples:
+ yield t
# Treat as a file.
elif not ignore(os.path.join(rel_path, f)):
@@ -374,10 +395,21 @@ def remove_dead_links(root):
"""
for file in os.listdir(root):
path = join_path(root, file)
- if os.path.islink(path):
- real_path = os.path.realpath(path)
- if not os.path.exists(real_path):
- os.unlink(path)
+ remove_if_dead_link(path)
+
+
+def remove_if_dead_link(path):
+ """
+ Removes the argument if it is a dead link, does nothing otherwise
+
+ Args:
+ path: the potential dead link
+ """
+ if os.path.islink(path):
+ real_path = os.path.realpath(path)
+ if not os.path.exists(real_path):
+ os.unlink(path)
+
def remove_linked_tree(path):
"""
@@ -402,37 +434,189 @@ def fix_darwin_install_name(path):
Fix install name of dynamic libraries on Darwin to have full path.
There are two parts of this task:
(i) use install_name('-id',...) to change install name of a single lib;
- (ii) use install_name('-change',...) to change the cross linking between libs.
- The function assumes that all libraries are in one folder and currently won't
- follow subfolders.
+ (ii) use install_name('-change',...) to change the cross linking between
+ libs. The function assumes that all libraries are in one folder and
+ currently won't follow subfolders.
Args:
- path: directory in which .dylib files are alocated
+ path: directory in which .dylib files are located
"""
- libs = glob.glob(join_path(path,"*.dylib"))
+ libs = glob.glob(join_path(path, "*.dylib"))
for lib in libs:
# fix install name first:
- subprocess.Popen(["install_name_tool", "-id",lib,lib], stdout=subprocess.PIPE).communicate()[0]
- long_deps = subprocess.Popen(["otool", "-L",lib], stdout=subprocess.PIPE).communicate()[0].split('\n')
+ subprocess.Popen(
+ ["install_name_tool", "-id", lib, lib],
+ stdout=subprocess.PIPE).communicate()[0]
+ long_deps = subprocess.Popen(
+ ["otool", "-L", lib],
+ stdout=subprocess.PIPE).communicate()[0].split('\n')
deps = [dep.partition(' ')[0][1::] for dep in long_deps[2:-1]]
# fix all dependencies:
for dep in deps:
for loc in libs:
if dep == os.path.basename(loc):
- subprocess.Popen(["install_name_tool", "-change",dep,loc,lib], stdout=subprocess.PIPE).communicate()[0]
+ subprocess.Popen(
+ ["install_name_tool", "-change", dep, loc, lib],
+ stdout=subprocess.PIPE).communicate()[0]
break
+# Utilities for libraries
-def find_library_path(libname, *paths):
- """Searches for a file called <libname> in each path.
- Return:
- directory where the library was found, if found. None otherwise.
+class LibraryList(collections.Sequence):
+ """Sequence of absolute paths to libraries
+ Provides a few convenience methods to manipulate library paths and get
+ commonly used compiler flags or names
"""
- for path in paths:
- library = join_path(path, libname)
- if os.path.exists(library):
- return path
- return None
+
+ def __init__(self, libraries):
+ self.libraries = list(libraries)
+
+ @property
+ def directories(self):
+ """Stable de-duplication of the directories where the libraries
+ reside
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/libc.a'])
+ >>> assert l.directories == ['/dir1', '/dir2']
+ """
+ return list(dedupe(
+ os.path.dirname(x) for x in self.libraries if os.path.dirname(x)
+ ))
+
+ @property
+ def basenames(self):
+ """Stable de-duplication of the base-names in the list
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.a'])
+ >>> assert l.basenames == ['liba.a', 'libb.a']
+ """
+ return list(dedupe(os.path.basename(x) for x in self.libraries))
+
+ @property
+ def names(self):
+ """Stable de-duplication of library names in the list
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir3/liba.so'])
+ >>> assert l.names == ['a', 'b']
+ """
+ return list(dedupe(x.split('.')[0][3:] for x in self.basenames))
+
+ @property
+ def search_flags(self):
+ """Search flags for the libraries
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
+ >>> assert l.search_flags == '-L/dir1 -L/dir2'
+ """
+ return ' '.join(['-L' + x for x in self.directories])
+
+ @property
+ def link_flags(self):
+ """Link flags for the libraries
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
+        >>> assert l.link_flags == '-la -lb'
+ """
+ return ' '.join(['-l' + name for name in self.names])
+
+ @property
+ def ld_flags(self):
+ """Search flags + link flags
+
+ >>> l = LibraryList(['/dir1/liba.a', '/dir2/libb.a', '/dir1/liba.so'])
+        >>> assert l.ld_flags == '-L/dir1 -L/dir2 -la -lb'
+ """
+ return self.search_flags + ' ' + self.link_flags
+
+ def __getitem__(self, item):
+ cls = type(self)
+ if isinstance(item, numbers.Integral):
+ return self.libraries[item]
+ return cls(self.libraries[item])
+
+ def __add__(self, other):
+ return LibraryList(dedupe(self.libraries + list(other)))
+
+ def __radd__(self, other):
+ return self.__add__(other)
+
+ def __eq__(self, other):
+ return self.libraries == other.libraries
+
+ def __len__(self):
+ return len(self.libraries)
+
+ def joined(self, separator=' '):
+ return separator.join(self.libraries)
+
+ def __repr__(self):
+ return self.__class__.__name__ + '(' + repr(self.libraries) + ')'
+
+ def __str__(self):
+ return self.joined()
+
+
+def find_libraries(args, root, shared=True, recurse=False):
+ """Returns an iterable object containing a list of full paths to
+ libraries if found.
+
+ Args:
+ args: iterable object containing a list of library names to \
+ search for (e.g. 'libhdf5')
+ root: root folder where to start searching
+ shared: if True searches for shared libraries, otherwise for static
+ recurse: if False search only root folder, if True descends top-down \
+ from the root
+
+ Returns:
+ list of full paths to the libraries that have been found
+ """
+ if not isinstance(args, collections.Sequence) or isinstance(args, str):
+ message = '{0} expects a sequence of strings as first argument'
+ message += ' [got {1} instead]'
+ raise TypeError(message.format(find_libraries.__name__, type(args)))
+
+ # Construct the right suffix for the library
+ if shared is True:
+ suffix = 'dylib' if sys.platform == 'darwin' else 'so'
+ else:
+ suffix = 'a'
+ # List of libraries we are searching with suffixes
+ libraries = ['{0}.{1}'.format(lib, suffix) for lib in args]
+ # Search method
+ if recurse is False:
+ search_method = _find_libraries_non_recursive
+ else:
+ search_method = _find_libraries_recursive
+
+ return search_method(libraries, root)
+
+
+def _find_libraries_recursive(libraries, root):
+ library_dict = collections.defaultdict(list)
+ for path, _, files in os.walk(root):
+ for lib in libraries:
+ if lib in files:
+ library_dict[lib].append(
+ join_path(path, lib)
+ )
+ answer = []
+ for lib in libraries:
+ answer.extend(library_dict[lib])
+ return LibraryList(answer)
+
+
+def _find_libraries_non_recursive(libraries, root):
+
+ def lib_or_none(lib):
+ library = join_path(root, lib)
+ if not os.path.exists(library):
+ return None
+ return library
+
+ return LibraryList(
+ [lib_or_none(lib) for lib in libraries if lib_or_none(lib) is not None]
+ )
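
A usage sketch for the new find_libraries/LibraryList pair; '/opt/hdf5' and the library names are hypothetical:

    from llnl.util.filesystem import find_libraries

    libs = find_libraries(['libhdf5', 'libhdf5_hl'], root='/opt/hdf5',
                          shared=True, recurse=True)
    print(libs.directories)    # stable, de-duplicated directories
    print(libs.search_flags)   # e.g. '-L/opt/hdf5/lib'
    print(libs.link_flags)     # e.g. '-lhdf5 -lhdf5_hl'
    print(libs.ld_flags)       # search flags followed by link flags
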
diff --git a/lib/spack/llnl/util/lang.py b/lib/spack/llnl/util/lang.py
index 63eb08d803..331cf2b3c5 100644
--- a/lib/spack/llnl/util/lang.py
+++ b/lib/spack/llnl/util/lang.py
@@ -24,7 +24,6 @@
##############################################################################
import os
import re
-import sys
import functools
import collections
import inspect
@@ -39,14 +38,15 @@ def index_by(objects, *funcs):
Values are used as keys. For example, suppose you have four
objects with attributes that look like this:
- a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
- b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
- c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
- d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
+ a = Spec(name="boost", compiler="gcc", arch="bgqos_0")
+ b = Spec(name="mrnet", compiler="intel", arch="chaos_5_x86_64_ib")
+ c = Spec(name="libelf", compiler="xlc", arch="bgqos_0")
+ d = Spec(name="libdwarf", compiler="intel", arch="chaos_5_x86_64_ib")
- list_of_specs = [a,b,c,d]
- index1 = index_by(list_of_specs, lambda s: s.arch, lambda s: s.compiler)
- index2 = index_by(list_of_specs, lambda s: s.compiler)
+ list_of_specs = [a,b,c,d]
+ index1 = index_by(list_of_specs, lambda s: s.arch,
+ lambda s: s.compiler)
+ index2 = index_by(list_of_specs, lambda s: s.compiler)
``index1'' now has two levels of dicts, with lists at the
leaves, like this:
@@ -137,7 +137,7 @@ def get_calling_module_name():
finally:
del stack
- if not '__module__' in caller_locals:
+ if '__module__' not in caller_locals:
raise RuntimeError("Must invoke get_calling_module_name() "
"from inside a class definition!")
@@ -173,11 +173,11 @@ def has_method(cls, name):
class memoized(object):
"""Decorator that caches the results of a function, storing them
in an attribute of that function."""
+
def __init__(self, func):
self.func = func
self.cache = {}
-
def __call__(self, *args):
if not isinstance(args, collections.Hashable):
# Not hashable, so just call the function.
@@ -187,12 +187,10 @@ class memoized(object):
self.cache[args] = self.func(*args)
return self.cache[args]
-
def __get__(self, obj, objtype):
"""Support instance methods."""
return functools.partial(self.__call__, obj)
-
def clear(self):
"""Expunge cache so that self.func will be called again."""
self.cache.clear()
@@ -237,13 +235,21 @@ def key_ordering(cls):
if not has_method(cls, '_cmp_key'):
raise TypeError("'%s' doesn't define _cmp_key()." % cls.__name__)
- setter('__eq__', lambda s,o: (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
- setter('__lt__', lambda s,o: o is not None and s._cmp_key() < o._cmp_key())
- setter('__le__', lambda s,o: o is not None and s._cmp_key() <= o._cmp_key())
-
- setter('__ne__', lambda s,o: (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
- setter('__gt__', lambda s,o: o is None or s._cmp_key() > o._cmp_key())
- setter('__ge__', lambda s,o: o is None or s._cmp_key() >= o._cmp_key())
+ setter('__eq__',
+ lambda s, o:
+ (s is o) or (o is not None and s._cmp_key() == o._cmp_key()))
+ setter('__lt__',
+ lambda s, o: o is not None and s._cmp_key() < o._cmp_key())
+ setter('__le__',
+ lambda s, o: o is not None and s._cmp_key() <= o._cmp_key())
+
+ setter('__ne__',
+ lambda s, o:
+ (s is not o) and (o is None or s._cmp_key() != o._cmp_key()))
+ setter('__gt__',
+ lambda s, o: o is None or s._cmp_key() > o._cmp_key())
+ setter('__ge__',
+ lambda s, o: o is None or s._cmp_key() >= o._cmp_key())
setter('__hash__', lambda self: hash(self._cmp_key()))
@@ -254,10 +260,10 @@ def key_ordering(cls):
class HashableMap(dict):
"""This is a hashable, comparable dictionary. Hash is performed on
a tuple of the values in the dictionary."""
+
def _cmp_key(self):
return tuple(sorted(self.values()))
-
def copy(self):
"""Type-agnostic clone method. Preserves subclass type."""
# Construct a new dict of my type
@@ -336,24 +342,35 @@ def match_predicate(*args):
return match
+def dedupe(sequence):
+    """Yields a stable de-duplication of a hashable sequence
-def DictWrapper(dictionary):
- """Returns a class that wraps a dictionary and enables it to be used
- like an object."""
- class wrapper(object):
- def __getattr__(self, name): return dictionary[name]
- def __setattr__(self, name, value): dictionary[name] = value
- def setdefault(self, *args): return dictionary.setdefault(*args)
- def get(self, *args): return dictionary.get(*args)
- def keys(self): return dictionary.keys()
- def values(self): return dictionary.values()
- def items(self): return dictionary.items()
- def __iter__(self): return iter(dictionary)
-
+ Args:
+ sequence: hashable sequence to be de-duplicated
- return wrapper()
+ Returns:
+ stable de-duplication of the sequence
+ """
+ seen = set()
+ for x in sequence:
+ if x not in seen:
+ yield x
+ seen.add(x)
class RequiredAttributeError(ValueError):
+
def __init__(self, message):
super(RequiredAttributeError, self).__init__(message)
+
+
+def duplicate_stream(original):
+ """Duplicates a stream at the os level.
+
+ :param stream original: original stream to be duplicated. Must have a
+ `fileno` callable attribute.
+
+ :return: duplicate of the original stream
+ :rtype: file like object
+ """
+ return os.fdopen(os.dup(original.fileno()))
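
A quick sketch of the new dedupe() generator:

    from llnl.util.lang import dedupe

    # keeps the first occurrence of each item, in order
    assert list(dedupe([3, 1, 3, 2, 1])) == [3, 1, 2]
    assert list(dedupe('abcabc')) == ['a', 'b', 'c']
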
diff --git a/lib/spack/llnl/util/link_tree.py b/lib/spack/llnl/util/link_tree.py
index b6d8796084..d6547e933a 100644
--- a/lib/spack/llnl/util/link_tree.py
+++ b/lib/spack/llnl/util/link_tree.py
@@ -23,12 +23,13 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""LinkTree class for setting up trees of symbolic links."""
-__all__ = ['LinkTree']
import os
import shutil
from llnl.util.filesystem import *
+__all__ = ['LinkTree']
+
empty_file_name = '.spack-empty'
@@ -43,13 +44,13 @@ class LinkTree(object):
modified.
"""
+
def __init__(self, source_root):
if not os.path.exists(source_root):
raise IOError("No such file or directory: '%s'", source_root)
self._root = source_root
-
def find_conflict(self, dest_root, **kwargs):
"""Returns the first file in dest that conflicts with src"""
kwargs['follow_nonexisting'] = False
@@ -61,9 +62,9 @@ class LinkTree(object):
return dest
return None
-
def merge(self, dest_root, **kwargs):
- """Link all files in src into dest, creating directories if necessary."""
+ """Link all files in src into dest, creating directories
+ if necessary."""
kwargs['order'] = 'pre'
for src, dest in traverse_tree(self._root, dest_root, **kwargs):
if os.path.isdir(src):
@@ -83,7 +84,6 @@ class LinkTree(object):
assert(not os.path.exists(dest))
os.symlink(src, dest)
-
def unmerge(self, dest_root, **kwargs):
"""Unlink all files in dest that exist in src.
diff --git a/lib/spack/llnl/util/lock.py b/lib/spack/llnl/util/lock.py
index 479a1b0167..2e44a94798 100644
--- a/lib/spack/llnl/util/lock.py
+++ b/lib/spack/llnl/util/lock.py
@@ -28,6 +28,13 @@ import errno
import time
import socket
+import llnl.util.tty as tty
+
+
+__all__ = ['Lock', 'LockTransaction', 'WriteTransaction', 'ReadTransaction',
+ 'LockError']
+
+
# Default timeout in seconds, after which locks will raise exceptions.
_default_timeout = 60
@@ -36,34 +43,88 @@ _sleep_time = 1e-5
class Lock(object):
- def __init__(self,file_path):
- self._file_path = file_path
- self._fd = None
+ """This is an implementation of a filesystem lock using Python's lockf.
+
+ In Python, `lockf` actually calls `fcntl`, so this should work with
+ any filesystem implementation that supports locking through the fcntl
+ calls. This includes distributed filesystems like Lustre (when flock
+ is enabled) and recent NFS versions.
+ """
+
+ def __init__(self, path, start=0, length=0):
+ """Construct a new lock on the file at ``path``.
+
+ By default, the lock applies to the whole file. Optionally,
+ caller can specify a byte range beginning ``start`` bytes from
+ the start of the file and extending ``length`` bytes from there.
+
+ This exposes a subset of fcntl locking functionality. It does
+ not currently expose the ``whence`` parameter -- ``whence`` is
+ always os.SEEK_SET and ``start`` is always evaluated from the
+ beginning of the file.
+ """
+ self.path = path
+ self._file = None
self._reads = 0
self._writes = 0
+ # byte range parameters
+ self._start = start
+ self._length = length
- def _lock(self, op, timeout):
+ # PID and host of lock holder
+ self.pid = self.old_pid = None
+ self.host = self.old_host = None
+
+ def _lock(self, op, timeout=_default_timeout):
"""This takes a lock using POSIX locks (``fnctl.lockf``).
- The lock is implemented as a spin lock using a nonblocking
- call to lockf().
+ The lock is implemented as a spin lock using a nonblocking call
+ to lockf().
On acquiring an exclusive lock, the lock writes this process's
- pid and host to the lock file, in case the holding process
- needs to be killed later.
+ pid and host to the lock file, in case the holding process needs
+ to be killed later.
If the lock times out, it raises a ``LockError``.
"""
start_time = time.time()
while (time.time() - start_time) < timeout:
try:
- if self._fd is None:
- self._fd = os.open(self._file_path, os.O_RDWR)
-
- fcntl.lockf(self._fd, op | fcntl.LOCK_NB)
+ # If we could write the file, we'd have opened it 'r+'.
+ # Raise an error when we attempt to upgrade to a write lock.
+ if op == fcntl.LOCK_EX:
+ if self._file and self._file.mode == 'r':
+ raise LockError(
+ "Can't take exclusive lock on read-only file: %s"
+ % self.path)
+
+ # Create file and parent directories if they don't exist.
+ if self._file is None:
+ self._ensure_parent_directory()
+
+ # Prefer to open 'r+' to allow upgrading to write
+ # lock later if possible. Open read-only if we can't
+ # write the lock file at all.
+ os_mode, fd_mode = (os.O_RDWR | os.O_CREAT), 'r+'
+ if os.path.exists(self.path) and not os.access(
+ self.path, os.W_OK):
+ os_mode, fd_mode = os.O_RDONLY, 'r'
+
+ fd = os.open(self.path, os_mode)
+ self._file = os.fdopen(fd, fd_mode)
+
+ # Try to get the lock (will raise if not available.)
+ fcntl.lockf(self._file, op | fcntl.LOCK_NB,
+ self._length, self._start, os.SEEK_SET)
+
+ # All locks read the owner PID and host
+ self._read_lock_data()
+
+ # Exclusive locks write their PID/host
if op == fcntl.LOCK_EX:
- os.write(self._fd, "pid=%s,host=%s" % (os.getpid(), socket.getfqdn()))
+ self._write_lock_data()
+
return
except IOError as error:
@@ -75,6 +136,39 @@ class Lock(object):
raise LockError("Timed out waiting for lock.")
+ def _ensure_parent_directory(self):
+ parent = os.path.dirname(self.path)
+ try:
+ os.makedirs(parent)
+ return True
+ except OSError as e:
+ # makedirs can fail when the directory already exists.
+ if not (e.errno == errno.EEXIST and os.path.isdir(parent) or
+ e.errno == errno.EISDIR):
+ raise
+
+ def _read_lock_data(self):
+ """Read PID and host data out of the file if it is there."""
+ line = self._file.read()
+ if line:
+ pid, host = line.strip().split(',')
+ _, _, self.pid = pid.rpartition('=')
+ _, _, self.host = host.rpartition('=')
+
+ def _write_lock_data(self):
+ """Write PID and host data to the file, recording old values."""
+ self.old_pid = self.pid
+ self.old_host = self.host
+
+ self.pid = os.getpid()
+ self.host = socket.getfqdn()
+
+ # write pid, host to disk to sync over FS
+ self._file.seek(0)
+ self._file.write("pid=%s,host=%s" % (self.pid, self.host))
+ self._file.truncate()
+ self._file.flush()
+ os.fsync(self._file.fileno())
def _unlock(self):
"""Releases a lock using POSIX locks (``fcntl.lockf``)
@@ -83,10 +177,10 @@ class Lock(object):
be masquerading as write locks, but this removes either.
"""
- fcntl.lockf(self._fd,fcntl.LOCK_UN)
- os.close(self._fd)
- self._fd = None
-
+ fcntl.lockf(self._file, fcntl.LOCK_UN,
+ self._length, self._start, os.SEEK_SET)
+ self._file.close()
+ self._file = None
def acquire_read(self, timeout=_default_timeout):
"""Acquires a recursive, shared lock for reading.
@@ -100,14 +194,15 @@ class Lock(object):
"""
if self._reads == 0 and self._writes == 0:
- self._lock(fcntl.LOCK_SH, timeout) # can raise LockError.
+ tty.debug('READ LOCK: {0.path}[{0._start}:{0._length}] [Acquiring]'
+ .format(self))
+ self._lock(fcntl.LOCK_SH, timeout=timeout) # can raise LockError.
self._reads += 1
return True
else:
self._reads += 1
return False
-
def acquire_write(self, timeout=_default_timeout):
"""Acquires a recursive, exclusive lock for writing.
@@ -120,14 +215,16 @@ class Lock(object):
"""
if self._writes == 0:
- self._lock(fcntl.LOCK_EX, timeout) # can raise LockError.
+ tty.debug(
+ 'WRITE LOCK: {0.path}[{0._start}:{0._length}] [Acquiring]'
+ .format(self))
+ self._lock(fcntl.LOCK_EX, timeout=timeout) # can raise LockError.
self._writes += 1
return True
else:
self._writes += 1
return False
-
def release_read(self):
"""Releases a read lock.
@@ -141,6 +238,8 @@ class Lock(object):
assert self._reads > 0
if self._reads == 1 and self._writes == 0:
+ tty.debug('READ LOCK: {0.path}[{0._start}:{0._length}] [Released]'
+ .format(self))
self._unlock() # can raise LockError.
self._reads -= 1
return True
@@ -148,7 +247,6 @@ class Lock(object):
self._reads -= 1
return False
-
def release_write(self):
"""Releases a write lock.
@@ -162,6 +260,8 @@ class Lock(object):
assert self._writes > 0
if self._writes == 1 and self._reads == 0:
+ tty.debug('WRITE LOCK: {0.path}[{0._start}:{0._length}] [Released]'
+ .format(self))
self._unlock() # can raise LockError.
self._writes -= 1
return True
@@ -170,6 +270,70 @@ class Lock(object):
return False
+class LockTransaction(object):
+ """Simple nested transaction context manager that uses a file lock.
+
+ This class can trigger actions when the lock is acquired for the
+ first time and released for the last.
+
+ If the acquire_fn returns a value, it is used as the return value for
+ __enter__, allowing it to be passed as the `as` argument of a `with`
+ statement.
+
+ If acquire_fn returns a context manager, *its* `__enter__` function will be
+ called in `__enter__` after acquire_fn, and its `__exit__` function will be
+ called before `release_fn` in `__exit__`, allowing you to nest a context
+ manager to be used along with the lock.
+
+ The timeout for the lock is customizable.
+
+ """
+
+ def __init__(self, lock, acquire_fn=None, release_fn=None,
+ timeout=_default_timeout):
+ self._lock = lock
+ self._timeout = timeout
+ self._acquire_fn = acquire_fn
+ self._release_fn = release_fn
+ self._as = None
+
+ def __enter__(self):
+ if self._enter() and self._acquire_fn:
+ self._as = self._acquire_fn()
+ if hasattr(self._as, '__enter__'):
+ return self._as.__enter__()
+ else:
+ return self._as
+
+ def __exit__(self, type, value, traceback):
+ suppress = False
+ if self._exit():
+ if self._as and hasattr(self._as, '__exit__'):
+ if self._as.__exit__(type, value, traceback):
+ suppress = True
+ if self._release_fn:
+ if self._release_fn(type, value, traceback):
+ suppress = True
+ return suppress
+
+
+class ReadTransaction(LockTransaction):
+
+ def _enter(self):
+ return self._lock.acquire_read(self._timeout)
+
+ def _exit(self):
+ return self._lock.release_read()
+
+
+class WriteTransaction(LockTransaction):
+
+ def _enter(self):
+ return self._lock.acquire_write(self._timeout)
+
+ def _exit(self):
+ return self._lock.release_write()
+
+
class LockError(Exception):
"""Raised when an attempt to acquire a lock times out."""
- pass
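
A usage sketch for the reworked Lock and the new transaction helpers (the lock file path is illustrative):

    from llnl.util.lock import Lock, ReadTransaction, WriteTransaction

    lock = Lock('/tmp/spack-example.lock')  # whole-file lock (start=0, length=0)

    # Recursive, shared lock; released when the outermost reader exits.
    with ReadTransaction(lock):
        pass  # read shared state here

    # Exclusive lock; writes pid/host into the lock file while held.
    with WriteTransaction(lock, timeout=30):
        pass  # mutate shared state here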
diff --git a/lib/spack/llnl/util/tty/__init__.py b/lib/spack/llnl/util/tty/__init__.py
index c638b113fd..1381bb2f7d 100644
--- a/lib/spack/llnl/util/tty/__init__.py
+++ b/lib/spack/llnl/util/tty/__init__.py
@@ -28,14 +28,17 @@ import textwrap
import fcntl
import termios
import struct
+import traceback
from StringIO import StringIO
from llnl.util.tty.color import *
_debug = False
_verbose = False
+_stacktrace = False
indent = " "
+
def is_verbose():
return _verbose
@@ -44,6 +47,10 @@ def is_debug():
return _debug
+def is_stacktrace():
+ return _stacktrace
+
+
def set_debug(flag):
global _debug
_debug = flag
@@ -52,10 +59,35 @@ def set_debug(flag):
def set_verbose(flag):
global _verbose
_verbose = flag
+
+
+def set_stacktrace(flag):
+ global _stacktrace
+ _stacktrace = flag
+
+
+def process_stacktrace(countback):
+ """Gives file and line frame 'countback' frames from the bottom"""
+ st = traceback.extract_stack()
+ # Not all entries are spack files; remove those that aren't.
+ file_list = []
+ for frame in st:
+ # Check that the file is a spack file
+ if frame[0].find("/spack") >= 0:
+ file_list.append(frame[0])
+ # We use commonprefix to find what the spack 'root' directory is.
+ root_dir = os.path.commonprefix(file_list)
+ root_len = len(root_dir)
+ st_idx = len(st) - countback - 1
+ st_text = "%s:%i " % (st[st_idx][0][root_len:], st[st_idx][1])
+ return st_text
def msg(message, *args):
- cprint("@*b{==>} %s" % cescape(message))
+ st_text = ""
+ if _stacktrace:
+ st_text = process_stacktrace(2)
+ cprint("@*b{%s==>} %s" % (st_text, cescape(message)))
for arg in args:
print indent + str(arg)
@@ -64,12 +96,19 @@ def info(message, *args, **kwargs):
format = kwargs.get('format', '*b')
stream = kwargs.get('stream', sys.stdout)
wrap = kwargs.get('wrap', False)
-
- cprint("@%s{==>} %s" % (format, cescape(str(message))), stream=stream)
+ break_long_words = kwargs.get('break_long_words', False)
+ st_countback = kwargs.get('countback', 3)
+
+ st_text = ""
+ if _stacktrace:
+ st_text = process_stacktrace(st_countback)
+ cprint("@%s{%s==>} %s" % (format, st_text, cescape(str(message))),
+ stream=stream)
for arg in args:
if wrap:
lines = textwrap.wrap(
- str(arg), initial_indent=indent, subsequent_indent=indent)
+ str(arg), initial_indent=indent, subsequent_indent=indent,
+ break_long_words=break_long_words)
for line in lines:
stream.write(line + '\n')
else:
@@ -102,6 +141,7 @@ def warn(message, *args, **kwargs):
def die(message, *args, **kwargs):
+ kwargs.setdefault('countback', 4)
error(message, *args, **kwargs)
sys.exit(1)
@@ -146,7 +186,8 @@ def get_yes_or_no(prompt, **kwargs):
elif default_value is False:
prompt += ' [y/N] '
else:
- raise ValueError("default for get_yes_no() must be True, False, or None.")
+ raise ValueError(
+ "default for get_yes_no() must be True, False, or None.")
result = None
while result is None:
@@ -172,8 +213,9 @@ def hline(label=None, **kwargs):
char = kwargs.pop('char', '-')
max_width = kwargs.pop('max_width', 64)
if kwargs:
- raise TypeError("'%s' is an invalid keyword argument for this function."
- % next(kwargs.iterkeys()))
+ raise TypeError(
+ "'%s' is an invalid keyword argument for this function."
+ % next(kwargs.iterkeys()))
rows, cols = terminal_size()
if not cols:
@@ -198,7 +240,8 @@ def terminal_size():
"""Gets the dimensions of the console: (rows, cols)."""
def ioctl_GWINSZ(fd):
try:
- rc = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234'))
+ rc = struct.unpack('hh', fcntl.ioctl(
+ fd, termios.TIOCGWINSZ, '1234'))
except:
return
return rc
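
A small sketch of the new stacktrace-prefixed output and the wrap options (the messages are illustrative):

    import llnl.util.tty as tty

    tty.set_stacktrace(True)   # prefix each message with '<file>:<line> '
    tty.msg("Fetching example tarball")
    tty.info("a rather long informational message",
             "with extra detail lines",
             wrap=True, break_long_words=False)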
diff --git a/lib/spack/llnl/util/tty/colify.py b/lib/spack/llnl/util/tty/colify.py
index 429ba45882..67acdfa517 100644
--- a/lib/spack/llnl/util/tty/colify.py
+++ b/lib/spack/llnl/util/tty/colify.py
@@ -27,15 +27,14 @@ Routines for printing columnar output. See colify() for more information.
"""
import os
import sys
-import fcntl
-import termios
-import struct
from StringIO import StringIO
from llnl.util.tty import terminal_size
from llnl.util.tty.color import clen, cextra
+
class ColumnConfig:
+
def __init__(self, cols):
self.cols = cols
self.line_length = 0
@@ -43,7 +42,8 @@ class ColumnConfig:
self.widths = [0] * cols # does not include ansi colors
def __repr__(self):
- attrs = [(a,getattr(self, a)) for a in dir(self) if not a.startswith("__")]
+ attrs = [(a, getattr(self, a))
+ for a in dir(self) if not a.startswith("__")]
return "<Config: %s>" % ", ".join("%s: %r" % a for a in attrs)
@@ -68,7 +68,7 @@ def config_variable_cols(elts, console_width, padding, cols=0):
max_cols = min(len(elts), max_cols)
# Range of column counts to try. If forced, use the supplied value.
- col_range = [cols] if cols else xrange(1, max_cols+1)
+ col_range = [cols] if cols else xrange(1, max_cols + 1)
# Determine the most columns possible for the console width.
configs = [ColumnConfig(c) for c in col_range]
@@ -106,7 +106,6 @@ def config_uniform_cols(elts, console_width, padding, cols=0):
# 'clen' ignores length of ansi color sequences.
max_len = max(clen(e) for e in elts) + padding
- max_clen = max(len(e) for e in elts) + padding
if cols == 0:
cols = max(1, console_width / max_len)
cols = min(len(elts), cols)
@@ -130,17 +129,19 @@ def colify(elts, **options):
output=<stream> A file object to write to. Default is sys.stdout.
indent=<int> Optionally indent all columns by some number of spaces.
padding=<int> Spaces between columns. Default is 2.
- width=<int> Width of the output. Default is 80 if tty is not detected.
+ width=<int> Width of the output. Default is 80 if tty not detected.
cols=<int> Force number of columns. Default is to size to terminal,
or single-column if no tty
tty=<bool> Whether to attempt to write to a tty. Default is to
- autodetect a tty. Set to False to force single-column output.
+ autodetect a tty. Set to False to force
+ single-column output.
- method=<string> Method to use to fit columns. Options are variable or uniform.
- Variable-width columns are tighter, uniform columns are all the
- same width and fit less data on the screen.
+ method=<string> Method to use to fit columns. Options are variable or
+ uniform. Variable-width columns are tighter, uniform
+ columns are all the same width and fit less data on
+ the screen.
"""
# Get keyword arguments or set defaults
cols = options.pop("cols", 0)
@@ -152,8 +153,9 @@ def colify(elts, **options):
console_cols = options.pop("width", None)
if options:
- raise TypeError("'%s' is an invalid keyword argument for this function."
- % next(options.iterkeys()))
+ raise TypeError(
+ "'%s' is an invalid keyword argument for this function."
+ % next(options.iterkeys()))
# elts needs to be an array of strings so we can count the elements
elts = [str(elt) for elt in elts]
@@ -167,7 +169,8 @@ def colify(elts, **options):
r, c = env_size.split('x')
console_rows, console_cols = int(r), int(c)
tty = True
- except: pass
+ except:
+ pass
# Use only one column if not a tty.
if not tty:
@@ -198,8 +201,13 @@ def colify(elts, **options):
for col in xrange(cols):
elt = col * rows + row
width = config.widths[col] + cextra(elts[elt])
- fmt = '%%-%ds' % width
- output.write(fmt % elts[elt])
+ if col < cols - 1:
+ fmt = '%%-%ds' % width
+ output.write(fmt % elts[elt])
+ else:
+ # Don't pad the rightmost column (spaces can wrap on
+ # small terminals if one line is overlong)
+ output.write(elts[elt])
output.write("\n")
row += 1
@@ -223,6 +231,7 @@ def colify_table(table, **options):
raise ValueError("Table is empty in colify_table!")
columns = len(table[0])
+
def transpose():
for i in xrange(columns):
for row in table:
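
For context, colify is typically invoked like this (the element names are illustrative):

    from llnl.util.tty.colify import colify, colify_table

    names = ['autoconf', 'automake', 'bzip2', 'cmake', 'curl', 'gcc']
    colify(names, indent=2, padding=2)   # columns sized to the terminal
    colify(names, cols=1)                # force single-column output
    colify_table([['zlib', '1.2.8'],     # print a small two-column table
                  ['bzip2', '1.0.6']])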
diff --git a/lib/spack/llnl/util/tty/color.py b/lib/spack/llnl/util/tty/color.py
index 0abcb09b97..b0c00f1502 100644
--- a/lib/spack/llnl/util/tty/color.py
+++ b/lib/spack/llnl/util/tty/color.py
@@ -75,25 +75,27 @@ To output an @, use '@@'. To output a } inside braces, use '}}'.
import re
import sys
+
class ColorParseError(Exception):
"""Raised when a color format fails to parse."""
+
def __init__(self, message):
super(ColorParseError, self).__init__(message)
# Text styles for ansi codes
-styles = {'*' : '1', # bold
- '_' : '4', # underline
- None : '0' } # plain
+styles = {'*': '1', # bold
+ '_': '4', # underline
+ None: '0'} # plain
# Dim and bright ansi colors
-colors = {'k' : 30, 'K' : 90, # black
- 'r' : 31, 'R' : 91, # red
- 'g' : 32, 'G' : 92, # green
- 'y' : 33, 'Y' : 93, # yellow
- 'b' : 34, 'B' : 94, # blue
- 'm' : 35, 'M' : 95, # magenta
- 'c' : 36, 'C' : 96, # cyan
- 'w' : 37, 'W' : 97 } # white
+colors = {'k': 30, 'K': 90, # black
+ 'r': 31, 'R': 91, # red
+ 'g': 32, 'G': 92, # green
+ 'y': 33, 'Y': 93, # yellow
+ 'b': 34, 'B': 94, # blue
+ 'm': 35, 'M': 95, # magenta
+ 'c': 36, 'C': 96, # cyan
+ 'w': 37, 'W': 97} # white
# Regex to be used for color formatting
color_re = r'@(?:@|\.|([*_])?([a-zA-Z])?(?:{((?:[^}]|}})*)})?)'
@@ -104,6 +106,7 @@ _force_color = False
class match_to_ansi(object):
+
def __init__(self, color=True):
self.color = color
@@ -179,12 +182,14 @@ def cprint(string, stream=sys.stdout, color=None):
"""Same as cwrite, but writes a trailing newline to the stream."""
cwrite(string + "\n", stream, color)
+
def cescape(string):
"""Replace all @ with @@ in the string provided."""
return str(string).replace('@', '@@')
class ColorStream(object):
+
def __init__(self, stream, color=None):
self._stream = stream
self._color = color
@@ -196,7 +201,7 @@ class ColorStream(object):
color = self._color
if self._color is None:
if raw:
- color=True
+ color = True
else:
color = self._stream.isatty() or _force_color
raw_write(colorize(string, color=color))
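
A brief sketch of the color markup these tables drive (the strings are illustrative; see the format description in the module docstring):

    from llnl.util.tty.color import cprint, cescape

    # '@*g{...}' is bold green, '@_{...}' is underlined, '@@' is a literal '@'.
    cprint("@*g{==>} build @_{succeeded}")
    cprint("@R{error:} %s" % cescape("100% @ stage-1"))  # escape user text first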
diff --git a/lib/spack/llnl/util/tty/log.py b/lib/spack/llnl/util/tty/log.py
index ca82da7b17..b1d45214ab 100644
--- a/lib/spack/llnl/util/tty/log.py
+++ b/lib/spack/llnl/util/tty/log.py
@@ -24,18 +24,20 @@
##############################################################################
"""Utility classes for logging the output of blocks of code.
"""
-import sys
+import multiprocessing
import os
import re
import select
-import inspect
+import sys
+import llnl.util.lang as lang
import llnl.util.tty as tty
import llnl.util.tty.color as color
# Use this to strip escape sequences
_escape = re.compile(r'\x1b[^m]*m|\x1b\[?1034h')
+
def _strip(line):
"""Strip color and control characters from a line."""
return _escape.sub('', line)
@@ -58,10 +60,10 @@ class keyboard_input(object):
When the with block completes, this will restore settings before
canonical and echo were disabled.
"""
+
def __init__(self, stream):
self.stream = stream
-
def __enter__(self):
self.old_cfg = None
@@ -86,10 +88,9 @@ class keyboard_input(object):
# Apply new settings for terminal
termios.tcsetattr(fd, termios.TCSADRAIN, self.new_cfg)
- except Exception, e:
+ except Exception:
pass # Some OS's do not support termios, so ignore.
-
def __exit__(self, exc_type, exception, traceback):
# If termios was available, restore old settings after the
# with block
@@ -100,95 +101,127 @@ class keyboard_input(object):
class log_output(object):
- """Redirects output and error of enclosed block to a file.
+ """Spawns a daemon that reads from a pipe and writes to a file
Usage:
- with log_output(open('logfile.txt', 'w')):
- # do things ... output will be logged.
+ # Spawns the daemon
+ with log_output('logfile.txt') as log_redirection:
+ # do things ... output is not redirected
+ with log_redirection:
+ # do things ... output will be logged
or:
- with log_output(open('logfile.txt', 'w'), echo=True):
- # do things ... output will be logged
- # and also printed to stdout.
-
- Closes the provided stream when done with the block.
- If echo is True, also prints the output to stdout.
+ with log_output('logfile.txt', echo=True) as log_redirection:
+ # do things ... output is not redirected
+ with log_redirection:
+ # do things ... output will be logged
+ # and also printed to stdout.
+
+ Opens the log file in 'w' mode when the daemon is spawned and closes it
+ when the daemon is joined. If echo is True, also prints the output to stdout.
"""
- def __init__(self, stream, echo=False, force_color=False, debug=False):
- self.stream = stream
- # various output options
+ def __init__(
+ self,
+ filename,
+ echo=False,
+ force_color=False,
+ debug=False,
+ input_stream=sys.stdin
+ ):
+ self.filename = filename
+ # Various output options
self.echo = echo
self.force_color = force_color
self.debug = debug
- # Default is to try file-descriptor reassignment unless the system
+ # Default is to try file-descriptor reassignment unless the system
# out/err streams do not have an associated file descriptor
self.directAssignment = False
+ self.read, self.write = os.pipe()
- def trace(self, frame, event, arg):
- """Jumps to __exit__ on the child process."""
- raise _SkipWithBlock()
-
+ # Needed to un-summon the daemon
+ self.parent_pipe, self.child_pipe = multiprocessing.Pipe()
+ # Input stream that controls verbosity interactively
+ self.input_stream = input_stream
def __enter__(self):
- """Redirect output from the with block to a file.
-
- This forks the with block as a separate process, with stdout
- and stderr redirected back to the parent via a pipe. If
- echo is set, also writes to standard out.
-
- """
- # remember these values for later.
- self._force_color = color._force_color
- self._debug = tty._debug
-
- read, write = os.pipe()
-
- self.pid = os.fork()
- if self.pid:
- # Parent: read from child, skip the with block.
- os.close(write)
-
- read_file = os.fdopen(read, 'r', 0)
- with self.stream as log_file:
- with keyboard_input(sys.stdin):
- while True:
- rlist, w, x = select.select([read_file, sys.stdin], [], [])
- if not rlist:
- break
-
- # Allow user to toggle echo with 'v' key.
- # Currently ignores other chars.
- if sys.stdin in rlist:
- if sys.stdin.read(1) == 'v':
- self.echo = not self.echo
-
- # handle output from the with block process.
- if read_file in rlist:
- line = read_file.readline()
- if not line:
- break
-
- # Echo to stdout if requested.
- if self.echo:
- sys.stdout.write(line)
-
- # Stripped output to log file.
- log_file.write(_strip(line))
-
- read_file.flush()
- read_file.close()
-
- # Set a trace function to skip the with block.
- sys.settrace(lambda *args, **keys: None)
- frame = inspect.currentframe(1)
- frame.f_trace = self.trace
-
- else:
- # Child: redirect output, execute the with block.
- os.close(read)
-
+ # Sets a daemon that writes to file what it reads from a pipe
+ try:
+ fwd_input_stream = lang.duplicate_stream(self.input_stream)
+ self.p = multiprocessing.Process(
+ target=self._spawn_writing_daemon,
+ args=(self.read, fwd_input_stream),
+ name='logger_daemon'
+ )
+ self.p.daemon = True
+ self.p.start()
+ finally:
+ fwd_input_stream.close()
+ return log_output.OutputRedirection(self)
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ self.parent_pipe.send(True)
+ self.p.join(60.0) # 1 minute to join the child
+
+ def _spawn_writing_daemon(self, read, input_stream):
+ # Daemon: read lines from the pipe and write them to the log file.
+ read_file = os.fdopen(read, 'r', 0)
+ with open(self.filename, 'w') as log_file:
+ with keyboard_input(input_stream):
+ while True:
+ # Without the last parameter (timeout) select would wait
+ # until at least one of the two streams is ready, which
+ # may cause the function to hang.
+ rlist, _, _ = select.select(
+ [read_file, input_stream], [], [], 0
+ )
+
+ # Allow user to toggle echo with 'v' key.
+ # Currently ignores other chars.
+ if input_stream in rlist:
+ if input_stream.read(1) == 'v':
+ self.echo = not self.echo
+
+ # Handle output from the with block process.
+ if read_file in rlist:
+ # If we arrive here, read_file was ready for
+ # reading: line should never be false-ish.
+ line = read_file.readline()
+
+ # Echo to stdout if requested.
+ if self.echo:
+ sys.stdout.write(line)
+
+ # Stripped output to log file.
+ log_file.write(_strip(line))
+ log_file.flush()
+
+ if self.child_pipe.poll():
+ break
+
+ def __del__(self):
+ """Closes the pipes"""
+ os.close(self.write)
+ os.close(self.read)
+
+ class OutputRedirection(object):
+
+ def __init__(self, other):
+ self.__dict__.update(other.__dict__)
+
+ def __enter__(self):
+ """Redirect output from the with block to a file.
+
+ Hijacks stdout / stderr and writes to the pipe
+ connected to the logger daemon
+ """
+ # remember these values for later.
+ self._force_color = color._force_color
+ self._debug = tty._debug
+ # Redirect this output to a pipe
+ write = self.write
try:
# Save old stdout and stderr
self._stdout = os.dup(sys.stdout.fileno())
@@ -204,54 +237,26 @@ class log_output(object):
output_redirect = os.fdopen(write, 'w')
sys.stdout = output_redirect
sys.stderr = output_redirect
-
if self.force_color:
color._force_color = True
-
if self.debug:
tty._debug = True
-
- def __exit__(self, exc_type, exception, traceback):
- """Exits on child, handles skipping the with block on parent."""
- # Child should just exit here.
- if self.pid == 0:
+ def __exit__(self, exc_type, exception, traceback):
+ """Plugs back the original file descriptors
+ for stdout and stderr
+ """
# Flush the log to disk.
sys.stdout.flush()
sys.stderr.flush()
-
- if exception:
- # Restore stdout on the child if there's an exception,
- # and let it be raised normally.
- #
- # This assumes that even if the exception is caught,
- # the child will exit with a nonzero return code. If
- # it doesn't, the child process will continue running.
- #
- # TODO: think about how this works outside install.
- # TODO: ideally would propagate exception to parent...
- if self.directAssignment:
- sys.stdout = self._stdout
- sys.stderr = self._stderr
- else:
- os.dup2(self._stdout, sys.stdout.fileno())
- os.dup2(self._stderr, sys.stderr.fileno())
-
- return False
-
+ if self.directAssignment:
+ # We seem to need this only to pass test/install.py
+ sys.stdout = self._stdout
+ sys.stderr = self._stderr
else:
- # Die quietly if there was no exception.
- os._exit(0)
-
- else:
- # If the child exited badly, parent also should exit.
- pid, returncode = os.waitpid(self.pid, 0)
- if returncode != 0:
- os._exit(1)
-
- # restore output options.
- color._force_color = self._force_color
- tty._debug = self._debug
+ os.dup2(self._stdout, sys.stdout.fileno())
+ os.dup2(self._stderr, sys.stderr.fileno())
- # Suppresses exception if it's our own.
- return exc_type is _SkipWithBlock
+ # restore output options.
+ color._force_color = self._force_color
+ tty._debug = self._debug
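
Putting the new double-context-manager protocol together (the filename is illustrative):

    from llnl.util.tty.log import log_output

    with log_output('build.out', echo=True) as redirection:
        print("not captured")   # daemon running, output not yet redirected
        with redirection:
            print("captured")   # written to build.out and, with echo, to stdout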
diff --git a/lib/spack/spack/__init__.py b/lib/spack/spack/__init__.py
index 8c6e0ba527..34a7b01616 100644
--- a/lib/spack/spack/__init__.py
+++ b/lib/spack/spack/__init__.py
@@ -1,3 +1,4 @@
+# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -29,7 +30,10 @@ import getpass
from llnl.util.filesystem import *
import llnl.util.tty as tty
-# This lives in $prefix/lib/spack/spack/__file__
+#-----------------------------------------------------------------------------
+# Variables describing how Spack is laid out in its prefix.
+#-----------------------------------------------------------------------------
+# This file lives in $prefix/lib/spack/spack/__file__
spack_root = ancestor(__file__, 4)
# The spack script itself
@@ -37,9 +41,12 @@ spack_file = join_path(spack_root, "bin", "spack")
# spack directory hierarchy
lib_path = join_path(spack_root, "lib", "spack")
+external_path = join_path(lib_path, "external")
build_env_path = join_path(lib_path, "env")
module_path = join_path(lib_path, "spack")
+platform_path = join_path(module_path, 'platforms')
compilers_path = join_path(module_path, "compilers")
+operating_system_path = join_path(module_path, 'operating_systems')
test_path = join_path(module_path, "test")
hooks_path = join_path(module_path, "hooks")
var_path = join_path(spack_root, "var", "spack")
@@ -47,133 +54,124 @@ stage_path = join_path(var_path, "stage")
repos_path = join_path(var_path, "repos")
share_path = join_path(spack_root, "share", "spack")
+# Paths to built-in Spack repositories.
+packages_path = join_path(repos_path, "builtin")
+mock_packages_path = join_path(repos_path, "builtin.mock")
+
+# User configuration location
+user_config_path = os.path.expanduser('~/.spack')
+
prefix = spack_root
opt_path = join_path(prefix, "opt")
-install_path = join_path(opt_path, "spack")
etc_path = join_path(prefix, "etc")
-#
-# Set up the default packages database.
-#
+
+#-----------------------------------------------------------------------------
+# Initial imports (only for use in this file -- see __all__ below.)
+#-----------------------------------------------------------------------------
+# These imports depend on the paths above, or on each other
+# Group them here so it's easy to understand the order.
+# TODO: refactor this stuff to be more init order agnostic.
import spack.repository
+import spack.error
+import spack.config
+import spack.fetch_strategy
+from spack.file_cache import FileCache
+from spack.package_prefs import PreferredPackages
+from spack.abi import ABI
+from spack.concretize import DefaultConcretizer
+from spack.version import Version
+from spack.util.path import canonicalize_path
+
+
+#-----------------------------------------------------------------------------
+# Initialize various data structures & objects at the core of Spack.
+#-----------------------------------------------------------------------------
+# Version information
+spack_version = Version("0.10.0")
+
+
+# Set up the default packages database.
try:
repo = spack.repository.RepoPath()
sys.meta_path.append(repo)
except spack.error.SpackError, e:
tty.die('while initializing Spack RepoPath:', e.message)
-#
-# Set up the installed packages database
-#
-from spack.database import Database
-installed_db = Database(install_path)
-
-#
-# Paths to built-in Spack repositories.
-#
-packages_path = join_path(repos_path, "builtin")
-mock_packages_path = join_path(repos_path, "builtin.mock")
-
-#
-# This controls how spack lays out install prefixes and
-# stage directories.
-#
-from spack.directory_layout import YamlDirectoryLayout
-install_layout = YamlDirectoryLayout(install_path)
-
-#
-# This controls how packages are sorted when trying to choose
-# the most preferred package. More preferred packages are sorted
-# first.
-#
-from spack.preferred_packages import PreferredPackages
-pkgsort = PreferredPackages()
-#
-# This tests ABI compatibility between packages
-#
-from spack.abi import ABI
+# Tests ABI compatibility between packages
abi = ABI()
-#
+
# This controls how things are concretized in spack.
# Replace it with a subclass if you want different
# policies.
-#
-from spack.concretize import DefaultConcretizer
concretizer = DefaultConcretizer()
-# Version information
-from spack.version import Version
-spack_version = Version("0.9.1")
+#-----------------------------------------------------------------------------
+# config.yaml options
+#-----------------------------------------------------------------------------
+_config = spack.config.get_config('config')
-#
-# Executables used by Spack
-#
-from spack.util.executable import Executable, which
-# User's editor from the environment
-editor = Executable(os.environ.get("EDITOR", "vi"))
+# Path where downloaded source code is cached
+cache_path = canonicalize_path(
+ _config.get('source_cache', join_path(var_path, "cache")))
+fetch_cache = spack.fetch_strategy.FsCache(cache_path)
-# Curl tool for fetching files.
-curl = which("curl", required=True)
-
-# Whether to build in tmp space or directly in the stage_path.
-# If this is true, then spack will make stage directories in
-# a tmp filesystem, and it will symlink them into stage_path.
-use_tmp_stage = True
-
-# Locations to use for staging and building, in order of preference
-# Use a %u to add a username to the stage paths here, in case this
-# is a shared filesystem. Spack will use the first of these paths
-# that it can create.
-tmp_dirs = []
-_default_tmp = tempfile.gettempdir()
-_tmp_user = getpass.getuser()
-
-_tmp_candidates = (_default_tmp, '/nfs/tmp2', '/tmp', '/var/tmp')
-for path in _tmp_candidates:
- # don't add a second username if it's already unique by user.
- if not _tmp_user in path:
- tmp_dirs.append(join_path(path, '%u', 'spack-stage'))
- else:
- tmp_dirs.append(join_path(path, 'spack-stage'))
-
-# Whether spack should allow installation of unsafe versions of
-# software. "Unsafe" versions are ones it doesn't have a checksum
-# for.
-do_checksum = True
-#
-# SYS_TYPE to use for the spack installation.
-# Value of this determines what platform spack thinks it is by
-# default. You can assign three types of values:
-# 1. None
-# Spack will try to determine the sys_type automatically.
-#
-# 2. A string
-# Spack will assume that the sys_type is hardcoded to the value.
-#
-# 3. A function that returns a string:
-# Spack will use this function to determine the sys_type.
-#
-sys_type = None
+# cache for miscellaneous stuff.
+misc_cache_path = canonicalize_path(
+ _config.get('misc_cache', join_path(user_config_path, 'cache')))
+misc_cache = FileCache(misc_cache_path)
-#
+# If this is enabled, tools that use SSL should not verify
+# certificates, e.g., curl should use the -k option.
+insecure = not _config.get('verify_ssl', True)
+
+
+# Whether spack should allow installation of unsafe versions of software.
+# "Unsafe" versions are ones it doesn't have a checksum for.
+do_checksum = _config.get('checksum', True)
+
+
+# If this is True, spack will not clean the environment to remove
+# potentially harmful variables before builds.
+dirty = _config.get('dirty', False)
+
+
+#-----------------------------------------------------------------------------
# When packages call 'from spack import *', this extra stuff is brought in.
#
# Spack internal code should call 'import spack' and accesses other
# variables (spack.repo, paths, etc.) directly.
#
-# TODO: maybe this should be separated out and should go in build_environment.py?
-# TODO: it's not clear where all the stuff that needs to be included in packages
-# should live. This file is overloaded for spack core vs. for packages.
+# TODO: maybe this should be separated out to build_environment.py?
+# TODO: it's not clear where all the stuff that needs to be included in
+# packages should live. This file is overloaded for spack core vs.
+# for packages.
#
-__all__ = ['Package', 'Version', 'when', 'ver']
-from spack.package import Package, ExtensionConflictError
+#-----------------------------------------------------------------------------
+__all__ = []
+
+from spack.package import Package
+from spack.build_systems.makefile import MakefilePackage
+from spack.build_systems.autotools import AutotoolsPackage
+from spack.build_systems.cmake import CMakePackage
+from spack.build_systems.python import PythonPackage
+from spack.build_systems.r import RPackage
+__all__ += ['Package', 'CMakePackage', 'AutotoolsPackage', 'MakefilePackage',
+ 'PythonPackage', 'RPackage']
+
from spack.version import Version, ver
+__all__ += ['Version', 'ver']
+
+from spack.spec import Spec, alldeps
+__all__ += ['Spec', 'alldeps']
+
from spack.multimethod import when
+__all__ += ['when']
import llnl.util.filesystem
from llnl.util.filesystem import *
@@ -187,9 +185,17 @@ import spack.util.executable
from spack.util.executable import *
__all__ += spack.util.executable.__all__
+# User's editor from the environment
+editor = Executable(os.environ.get("EDITOR", "vi"))
+
from spack.package import \
- install_dependency_symlinks, flatten_dependencies, DependencyConflictError, \
- InstallError, ExternalPackageError
+ install_dependency_symlinks, flatten_dependencies, \
+ DependencyConflictError, InstallError, ExternalPackageError
__all__ += [
- 'install_dependency_symlinks', 'flatten_dependencies', 'DependencyConflictError',
- 'InstallError', 'ExternalPackageError']
+ 'install_dependency_symlinks', 'flatten_dependencies',
+ 'DependencyConflictError', 'InstallError', 'ExternalPackageError']
+
+# Add default values for attributes that would otherwise be modified by the
+# Spack main script.
+debug = True
+spack_working_dir = None
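
The config.yaml keys read above all follow the same get-with-default pattern; a sketch of equivalent lookups (defaults mirror the ones used above, with the cache path written in shorthand):

    import spack.config
    from spack.util.path import canonicalize_path

    config = spack.config.get_config('config')
    verify_ssl = config.get('verify_ssl', True)    # 'insecure' is its negation
    do_checksum = config.get('checksum', True)
    misc_cache = canonicalize_path(
        config.get('misc_cache', '~/.spack/cache'))  # assumed-equivalent default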
diff --git a/lib/spack/spack/abi.py b/lib/spack/spack/abi.py
index 91d1d2003d..b3b1dd6d27 100644
--- a/lib/spack/spack/abi.py
+++ b/lib/spack/spack/abi.py
@@ -26,18 +26,20 @@
import os
import spack
import spack.spec
+from spack.build_environment import dso_suffix
from spack.spec import CompilerSpec
from spack.util.executable import Executable, ProcessError
from llnl.util.lang import memoized
+
class ABI(object):
"""This class provides methods to test ABI compatibility between specs.
The current implementation is rather rough and could be improved."""
def architecture_compatible(self, parent, child):
- """Returns true iff the parent and child specs have ABI compatible architectures."""
- return not parent.architecture or not child.architecture or parent.architecture == child.architecture
-
+ """Return true if parent and child have ABI compatible targets."""
+ return not parent.architecture or not child.architecture or \
+ parent.architecture == child.architecture
@memoized
def _gcc_get_libstdcxx_version(self, version):
@@ -53,15 +55,16 @@ class ABI(object):
output = None
if compiler.cxx:
rungcc = Executable(compiler.cxx)
- libname = "libstdc++.so"
+ libname = "libstdc++." + dso_suffix
elif compiler.cc:
rungcc = Executable(compiler.cc)
- libname = "libgcc_s.so"
+ libname = "libgcc_s." + dso_suffix
else:
return None
try:
- output = rungcc("--print-file-name=%s" % libname, return_output=True)
- except ProcessError, e:
+ output = rungcc("--print-file-name=%s" % libname,
+ return_output=True)
+ except ProcessError:
return None
if not output:
return None
@@ -70,7 +73,6 @@ class ABI(object):
return None
return os.path.basename(libpath)
-
@memoized
def _gcc_compiler_compare(self, pversion, cversion):
"""Returns true iff the gcc version pversion and cversion
@@ -81,7 +83,6 @@ class ABI(object):
return False
return plib == clib
-
def _intel_compiler_compare(self, pversion, cversion):
"""Returns true iff the intel version pversion and cversion
are ABI compatible"""
@@ -91,9 +92,8 @@ class ABI(object):
return False
return pversion.version[:2] == cversion.version[:2]
-
def compiler_compatible(self, parent, child, **kwargs):
- """Returns true iff the compilers for parent and child specs are ABI compatible"""
+ """Return true if compilers for parent and child are ABI compatible."""
if not parent.compiler or not child.compiler:
return True
@@ -108,8 +108,8 @@ class ABI(object):
# TODO: into compiler classes?
for pversion in parent.compiler.versions:
for cversion in child.compiler.versions:
- # For a few compilers use specialized comparisons. Otherwise
- # match on version match.
+ # For a few compilers use specialized comparisons.
+ # Otherwise match on version match.
if pversion.satisfies(cversion):
return True
elif (parent.compiler.name == "gcc" and
@@ -120,9 +120,8 @@ class ABI(object):
return True
return False
-
def compatible(self, parent, child, **kwargs):
"""Returns true iff a parent and child spec are ABI compatible"""
loosematch = kwargs.get('loose', False)
return self.architecture_compatible(parent, child) and \
- self.compiler_compatible(parent, child, loose=loosematch)
+ self.compiler_compatible(parent, child, loose=loosematch)
diff --git a/lib/spack/spack/architecture.py b/lib/spack/spack/architecture.py
index b14cb2bea2..e44e0dc109 100644
--- a/lib/spack/spack/architecture.py
+++ b/lib/spack/spack/architecture.py
@@ -22,68 +22,495 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+"""
+This module contains all the elements that are required to create an
+architecture object. These include the target processor, the operating system,
+and the architecture platform (e.g. cray, darwin, linux, bgq, etc.) classes.
+
+On a multiple architecture machine, the architecture spec field can be set to
+build a package against any target and operating system that is present on the
+platform. On Cray platforms or any other architecture that has different front
+and back end environments, the operating system will determine the method of
+compiler detection.
+
+There are two different types of compiler detection:
+ 1. Through the $PATH env variable (front-end detection)
+ 2. Through the tcl module system (back-end detection)
+
+Depending on which operating system is specified, the compiler will be detected
+using one of those methods.
+
+For platforms such as linux and darwin, the operating system is autodetected
+and the target is set to be x86_64.
+
+The command line syntax for specifying an architecture is as follows:
+
+ target=<Target name> os=<OperatingSystem name>
+
+If the user wishes to use the defaults, either target or os can be left out of
+the command line and Spack will concretize using the default. These defaults
+are set in the 'platforms/' directory which contains the different subclasses
+for platforms. If the machine has multiple architectures, the user can
+also enter 'frontend' (or 'fe') or 'backend' (or 'be'). These settings will
+concretize to their respective front-end and back-end targets and operating
+systems.
+Additional platforms can be added by creating a subclass of Platform
+and adding it inside the platform directory.
+
+Platform is an abstract class that is extended by subclasses. If the user
+wants to add a new type of platform (such as cray_xe), they can create a
+subclass and set all the class attributes such as priority, front_target,
+back_target, front_os, back_os. Platforms also contain a priority class
+attribute; a lower number signifies higher priority. These numbers are
+arbitrarily set and can be changed, though there is often little need to unless
+a new platform is added and the user wants it to be detected first.
+
+Targets are created inside the platform subclasses. Most architectures
+(like linux and darwin) will have only one target (x86_64), but in the case of
+Cray machines there are both front-end and back-end processors. The user can
+specify which targets are present on the front-end and back-end architectures.
+
+Depending on the platform, operating systems are either auto-detected or are
+set. The user can set the front-end and back-end operating systems via the
+class attributes front_os and back_os. The operating system, as described
+earlier, is responsible for compiler detection.
+"""
import os
-import re
-import platform
+import inspect
+import platform as py_platform
-from llnl.util.lang import memoized
+from llnl.util.lang import memoized, list_modules, key_ordering
+from llnl.util.filesystem import join_path
+import llnl.util.tty as tty
import spack
+from spack.util.naming import mod_to_class
+from spack.util.environment import get_path
+from spack.util.multiproc import parmap
+from spack.util.spack_yaml import syaml_dict
import spack.error as serr
-class InvalidSysTypeError(serr.SpackError):
- def __init__(self, sys_type):
- super(InvalidSysTypeError,
- self).__init__("Invalid sys_type value for Spack: " + sys_type)
-
+class NoPlatformError(serr.SpackError):
-class NoSysTypeError(serr.SpackError):
def __init__(self):
- super(NoSysTypeError,
- self).__init__("Could not determine sys_type for this machine.")
+ super(NoPlatformError, self).__init__(
+ "Could not determine a platform for this machine.")
+
+
+@key_ordering
+class Target(object):
+ """ Target is the processor of the host machine.
+ The host machine may have different front-end and back-end targets,
+ especially if it is a Cray machine. The target will have a name and
+ also the module_name (e.g. craype-compiler). Targets will also
+ recognize which platform they came from using the set_platform method.
+ Targets will have compiler-finding strategies.
+ """
+
+ def __init__(self, name, module_name=None):
+ self.name = name # e.g. 'ivybridge' on Cray, or simply 'x86_64'
+ self.module_name = module_name # craype-ivybridge
+
+ # Sets only the platform name to avoid recursion
+
+ def _cmp_key(self):
+ return (self.name, self.module_name)
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return self.name
+
+
+@key_ordering
+class Platform(object):
+ """ Abstract class that each type of Platform will subclass.
+ A concrete instance of the subclass is returned once it is detected.
+ """
+
+ priority = None # Subclass sets number. Controls detection order
+ front_end = None
+ back_end = None
+ default = None # The default back-end target (e.g. 'ivybridge' on Cray)
+
+ front_os = None
+ back_os = None
+ default_os = None
+
+ reserved_targets = ['default_target', 'frontend', 'fe', 'backend', 'be']
+ reserved_oss = ['default_os', 'frontend', 'fe', 'backend', 'be']
+
+ def __init__(self, name):
+ self.targets = {}
+ self.operating_sys = {}
+ self.name = name
+
+ def add_target(self, name, target):
+ """Used by the platform specific subclass to list available targets.
+ Raises an error if the platform specifies a name
+ that is reserved by spack as an alias.
+ """
+ if name in Platform.reserved_targets:
+ raise ValueError(
+ "%s is a spack reserved alias "
+ "and cannot be the name of a target" % name)
+ self.targets[name] = target
+
+ def target(self, name):
+ """This is a getter method for the target dictionary
+ that handles defaulting based on the values provided by default,
+ front-end, and back-end. This can be overwritten
+ by a subclass for which we want to provide further aliasing options.
+ """
+ if name == 'default_target':
+ name = self.default
+ elif name == 'frontend' or name == 'fe':
+ name = self.front_end
+ elif name == 'backend' or name == 'be':
+ name = self.back_end
+
+ return self.targets.get(name, None)
+
+ def add_operating_system(self, name, os_class):
+ """ Add the operating_system class object into the
+ platform.operating_sys dictionary
+ """
+ if name in Platform.reserved_oss:
+ raise ValueError(
+ "%s is a spack reserved alias "
+ "and cannot be the name of an OS" % name)
+ self.operating_sys[name] = os_class
+
+ def operating_system(self, name):
+ if name == 'default_os':
+ name = self.default_os
+ if name == 'frontend' or name == "fe":
+ name = self.front_os
+ if name == 'backend' or name == 'be':
+ name = self.back_os
+
+ return self.operating_sys.get(name, None)
+
+ @classmethod
+ def setup_platform_environment(self, pkg, env):
+ """ Subclass can override this method if it requires any
+ platform-specific build environment modifications.
+ """
+ pass
+
+ @classmethod
+ def detect(self):
+ """ Subclass is responsible for implementing this method.
+ Returns True if the Platform class detects that
+ it is the current platform
+ and False if it's not.
+ """
+ raise NotImplementedError()
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return self.name
+
+ def _cmp_key(self):
+ t_keys = ''.join(str(t._cmp_key()) for t in
+ sorted(self.targets.values()))
+ o_keys = ''.join(str(o._cmp_key()) for o in
+ sorted(self.operating_sys.values()))
+ return (self.name,
+ self.default,
+ self.front_end,
+ self.back_end,
+ self.default_os,
+ self.front_os,
+ self.back_os,
+ t_keys,
+ o_keys)
+
+
+@key_ordering
+class OperatingSystem(object):
+ """ Operating System will be like a class similar to platform extended
+ by subclasses for the specifics. Operating System will contain the
+ compiler finding logic. Instead of calling two separate methods to
+ find compilers we call find_compilers method for each operating system
+ """
+
+ def __init__(self, name, version):
+ self.name = name
+ self.version = version
+
+ def __str__(self):
+ return "%s%s" % (self.name, self.version)
+
+ def __repr__(self):
+ return self.__str__()
+
+ def _cmp_key(self):
+ return (self.name, self.version)
+
+ def find_compilers(self, *paths):
+ """
+ Return a list of compilers found in the supplied paths.
+ This invokes the find() method for each Compiler class,
+ and appends the compilers detected to a list.
+ """
+ if not paths:
+ paths = get_path('PATH')
+ # Make sure path elements exist, and include /bin directories
+ # under prefixes.
+ filtered_path = []
+ for p in paths:
+ # Eliminate symlinks and just take the real directories.
+ p = os.path.realpath(p)
+ if not os.path.isdir(p):
+ continue
+ filtered_path.append(p)
+
+ # Check for a bin directory, add it if it exists
+ bin = join_path(p, 'bin')
+ if os.path.isdir(bin):
+ filtered_path.append(os.path.realpath(bin))
+ # Once the paths are cleaned up, do a search for each type of
+ # compiler. We can spawn a bunch of parallel searches to reduce
+ # the overhead of spelunking all these directories.
+ # NOTE: we import spack.compilers here to avoid init order cycles
+ import spack.compilers
+ types = spack.compilers.all_compiler_types()
+ compiler_lists = parmap(lambda cmp_cls:
+ self.find_compiler(cmp_cls, *filtered_path),
+ types)
-def get_sys_type_from_spack_globals():
- """Return the SYS_TYPE from spack globals, or None if it isn't set."""
- if not hasattr(spack, "sys_type"):
- return None
- elif hasattr(spack.sys_type, "__call__"):
- return spack.sys_type()
- else:
- return spack.sys_type
+ # ensure all the version calls we made are cached in the parent
+ # process, as well. This speeds up Spack a lot.
+ clist = reduce(lambda x, y: x + y, compiler_lists)
+ return clist
+ def find_compiler(self, cmp_cls, *path):
+ """Try to find the given type of compiler in the user's
+ environment. For each set of compilers found, this returns
+ compiler objects with the cc, cxx, f77, fc paths and the
+ version filled in.
-def get_sys_type_from_environment():
- """Return $SYS_TYPE or None if it's not defined."""
- return os.environ.get('SYS_TYPE')
+ This will search for compilers with the names in cc_names,
+ cxx_names, etc. and it will group them if they have common
+ prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
+ be grouped with g++-mp-4.7 and gfortran-mp-4.7.
+ """
+ dicts = parmap(
+ lambda t: cmp_cls._find_matches_in_path(*t),
+ [(cmp_cls.cc_names, cmp_cls.cc_version) + tuple(path),
+ (cmp_cls.cxx_names, cmp_cls.cxx_version) + tuple(path),
+ (cmp_cls.f77_names, cmp_cls.f77_version) + tuple(path),
+ (cmp_cls.fc_names, cmp_cls.fc_version) + tuple(path)])
+ all_keys = set()
+ for d in dicts:
+ all_keys.update(d)
-def get_sys_type_from_platform():
- """Return the architecture from Python's platform module."""
- sys_type = platform.system() + '-' + platform.machine()
- sys_type = re.sub(r'[^\w-]', '_', sys_type)
- return sys_type.lower()
+ compilers = {}
+ for k in all_keys:
+ ver, pre, suf = k
+
+ # Skip compilers with unknown version.
+ if ver == 'unknown':
+ continue
+
+ paths = tuple(pn[k] if k in pn else None for pn in dicts)
+ spec = spack.spec.CompilerSpec(cmp_cls.name, ver)
+
+ if ver in compilers:
+ prev = compilers[ver]
+
+ # prefer the one with more compilers.
+ prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
+ newcount = len([p for p in paths if p is not None])
+ prevcount = len([p for p in prev_paths if p is not None])
+
+ # Don't add if it's not an improvement over prev compiler.
+ if newcount <= prevcount:
+ continue
+
+ compilers[ver] = cmp_cls(spec, self, py_platform.machine(), paths)
+
+ return list(compilers.values())
+
+ def to_dict(self):
+ d = {}
+ d['name'] = self.name
+ d['version'] = self.version
+ return d
+
+
+@key_ordering
+class Arch(object):
+ """Architecture is now a class to help with setting attributes.
+
+ TODO: refactor so that we don't need this class.
+ """
+
+ def __init__(self, plat=None, os=None, target=None):
+ self.platform = plat
+ if plat and os:
+ os = self.platform.operating_system(os)
+ self.platform_os = os
+ if plat and target:
+ target = self.platform.target(target)
+ self.target = target
+
+ # Hooks for parser to use when platform is set after target or os
+ self.target_string = None
+ self.os_string = None
+
+ @property
+ def concrete(self):
+ return all((self.platform is not None,
+ isinstance(self.platform, Platform),
+ self.platform_os is not None,
+ isinstance(self.platform_os, OperatingSystem),
+ self.target is not None, isinstance(self.target, Target)))
+
+ def __str__(self):
+ if self.platform or self.platform_os or self.target:
+ if self.platform.name == 'darwin':
+ os_name = self.platform_os.name if self.platform_os else "None"
+ else:
+ os_name = str(self.platform_os)
+
+ return (str(self.platform) + "-" +
+ os_name + "-" + str(self.target))
+ else:
+ return ''
+
+ def __contains__(self, string):
+ return string in str(self)
+
+ # TODO: make this unnecessary: don't include an empty arch on *every* spec.
+ def __nonzero__(self):
+ return (self.platform is not None or
+ self.platform_os is not None or
+ self.target is not None)
+ __bool__ = __nonzero__
+
+ def _cmp_key(self):
+ if isinstance(self.platform, Platform):
+ platform = self.platform.name
+ else:
+ platform = self.platform
+ if isinstance(self.platform_os, OperatingSystem):
+ platform_os = self.platform_os.name
+ else:
+ platform_os = self.platform_os
+ if isinstance(self.target, Target):
+ target = self.target.name
+ else:
+ target = self.target
+ return (platform, platform_os, target)
+
+ def to_dict(self):
+ str_or_none = lambda v: str(v) if v else None
+ d = syaml_dict([
+ ('platform', str_or_none(self.platform)),
+ ('platform_os', str_or_none(self.platform_os)),
+ ('target', str_or_none(self.target))])
+ return syaml_dict([('arch', d)])
+
+ @staticmethod
+ def from_dict(d):
+ spec = spack.spec.ArchSpec.from_dict(d)
+ return arch_for_spec(spec)
+
+
+def get_platform(platform_name):
+ """Returns a platform object that corresponds to the given name."""
+ platform_list = all_platforms()
+ for p in platform_list:
+ if platform_name.replace("_", "").lower() == p.__name__.lower():
+ return p()
+
+
+def verify_platform(platform_name):
+ """ Determines whether or not the platform with the given name is supported
+ in Spack. For more information, see the 'spack.platforms' submodule.
+ """
+ platform_name = platform_name.replace("_", "").lower()
+ platform_names = [p.__name__.lower() for p in all_platforms()]
+
+ if platform_name not in platform_names:
+ tty.die("%s is not a supported platform; supported platforms are %s" %
+ (platform_name, platform_names))
+
+
+def arch_for_spec(arch_spec):
+ """Transforms the given architecture spec into an architecture objct."""
+ arch_spec = spack.spec.ArchSpec(arch_spec)
+ assert(arch_spec.concrete)
+
+ arch_plat = get_platform(arch_spec.platform)
+ if not (arch_plat.operating_system(arch_spec.platform_os) and
+ arch_plat.target(arch_spec.target)):
+ raise ValueError(
+ "Can't recreate arch for spec %s on current arch %s; "
+ "spec architecture is too different" % (arch_spec, sys_type()))
+
+ return Arch(arch_plat, arch_spec.platform_os, arch_spec.target)
@memoized
-def sys_type():
- """Returns a SysType for the current machine."""
- methods = [get_sys_type_from_spack_globals, get_sys_type_from_environment,
- get_sys_type_from_platform]
+def all_platforms():
+ classes = []
+ mod_path = spack.platform_path
+ parent_module = "spack.platforms"
+
+ for name in list_modules(mod_path):
+ mod_name = '%s.%s' % (parent_module, name)
+ class_name = mod_to_class(name)
+ mod = __import__(mod_name, fromlist=[class_name])
+ if not hasattr(mod, class_name):
+ tty.die('No class %s defined in %s' % (class_name, mod_name))
+ cls = getattr(mod, class_name)
+ if not inspect.isclass(cls):
+ tty.die('%s.%s is not a class' % (mod_name, class_name))
- # search for a method that doesn't return None
- sys_type = None
- for method in methods:
- sys_type = method()
- if sys_type:
- break
+ classes.append(cls)
+
+ return classes
+
+
+@memoized
+def platform():
+ """Detects the platform for this machine.
+
+ Gathers a list of all available subclasses of Platform and
+ sorts the list according to their priority. Priority is
+ an arbitrarily set number. Detects the platform either using uname or
+ a file path (/opt/cray...).
+ """
+ # Try to create a Platform object using the config file FIRST
+ platform_list = all_platforms()
+ platform_list.sort(key=lambda a: a.priority)
+
+ for platform_cls in platform_list:
+ if platform_cls.detect():
+ return platform_cls()
+
+
+@memoized
+def sys_type():
+ """Print out the "default" platform-os-target tuple for this machine.
- # Couldn't determine the sys_type for this machine.
- if sys_type is None:
- return "unknown_arch"
+ On machines with only one OS/target, returns the
+ platform-os-target for the frontend. For machines with a frontend
+ and a backend, returns the default backend.
- if not isinstance(sys_type, basestring):
- raise InvalidSysTypeError(sys_type)
+ TODO: replace with use of more explicit methods to get *all* the
+ backends, as client code should really be aware of cross-compiled
+ architectures.
- return sys_type
+ """
+ arch = Arch(platform(), 'default_os', 'default_target')
+ return str(arch)
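
A sketch of the subclassing pattern the module docstring describes; the platform below is purely illustrative and is not registered anywhere:

    from spack.architecture import OperatingSystem, Platform, Target

    class ExampleBox(Platform):              # hypothetical platform
        priority = 90                        # lower number = checked earlier
        front_end = back_end = default = 'x86_64'
        front_os = back_os = default_os = 'exampleos10'

        def __init__(self):
            super(ExampleBox, self).__init__('examplebox')
            self.add_target('x86_64', Target('x86_64'))
            self.add_operating_system(
                'exampleos10', OperatingSystem('exampleos', '10'))

        @classmethod
        def detect(cls):
            return False                     # illustration only; never detected

Real platforms live under lib/spack/spack/platforms/ so that all_platforms() can discover them.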
diff --git a/lib/spack/spack/build_environment.py b/lib/spack/spack/build_environment.py
index 5ce4cb1ce1..3e6dc12b35 100644
--- a/lib/spack/spack/build_environment.py
+++ b/lib/spack/spack/build_environment.py
@@ -51,15 +51,19 @@ There are two parts to the build environment:
Skimming this module is a nice way to get acquainted with the types of
calls you can make from within the install() function.
"""
+import inspect
import multiprocessing
import os
-import platform
import shutil
import sys
+import traceback
-import spack
+import llnl.util.lang as lang
import llnl.util.tty as tty
from llnl.util.filesystem import *
+
+import spack
+import spack.store
from spack.environment import EnvironmentModifications, validate
from spack.util.environment import *
from spack.util.executable import Executable, which
@@ -74,20 +78,21 @@ SPACK_NO_PARALLEL_MAKE = 'SPACK_NO_PARALLEL_MAKE'
# set_build_environment_variables and used to pass parameters to
# Spack's compiler wrappers.
#
-SPACK_ENV_PATH = 'SPACK_ENV_PATH'
-SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
-SPACK_PREFIX = 'SPACK_PREFIX'
-SPACK_INSTALL = 'SPACK_INSTALL'
-SPACK_DEBUG = 'SPACK_DEBUG'
-SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
-SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
+SPACK_ENV_PATH = 'SPACK_ENV_PATH'
+SPACK_DEPENDENCIES = 'SPACK_DEPENDENCIES'
+SPACK_RPATH_DEPS = 'SPACK_RPATH_DEPS'
+SPACK_LINK_DEPS = 'SPACK_LINK_DEPS'
+SPACK_PREFIX = 'SPACK_PREFIX'
+SPACK_INSTALL = 'SPACK_INSTALL'
+SPACK_DEBUG = 'SPACK_DEBUG'
+SPACK_SHORT_SPEC = 'SPACK_SHORT_SPEC'
+SPACK_DEBUG_LOG_DIR = 'SPACK_DEBUG_LOG_DIR'
# Platform-specific library suffix.
dso_suffix = 'dylib' if sys.platform == 'darwin' else 'so'
-
class MakeExecutable(Executable):
"""Special callable executable object for make so the user can
specify parallel or not on a per-invocation basis. Using
@@ -98,6 +103,7 @@ class MakeExecutable(Executable):
Note that if the SPACK_NO_PARALLEL_MAKE env var is set it overrides
everything.
"""
+
def __init__(self, name, jobs):
super(MakeExecutable, self).__init__(name)
self.jobs = jobs
@@ -113,30 +119,95 @@ class MakeExecutable(Executable):
return super(MakeExecutable, self).__call__(*args, **kwargs)
+def load_module(mod):
+ """Takes a module name and removes modules until it is possible to
+ load that module. It then loads the provided module. Depends on the
+ modulecmd implementation of modules used by Cray and Lmod.
+ """
+ # Create an executable of the module command that will output python code
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Read the module and remove any conflicting modules
+ # We do this without checking that they are already installed
+ # for ease of programming because unloading a module that is not
+ # loaded does nothing.
+ text = modulecmd('show', mod, output=str, error=str).split()
+ for i, word in enumerate(text):
+ if word == 'conflict':
+ exec(compile(modulecmd('unload', text[i + 1], output=str,
+ error=str), '<string>', 'exec'))
+ # Load the module now that there are no conflicts
+ load = modulecmd('load', mod, output=str, error=str)
+ exec(compile(load, '<string>', 'exec'))
+
+
+def get_path_from_module(mod):
+ """Inspects a TCL module for entries that indicate the absolute path
+ at which the library supported by said module can be found.
+ """
+ # Create a modulecmd executable
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ # Read the module
+ text = modulecmd('show', mod, output=str, error=str).split('\n')
+ # If it lists its package directory, return that
+ for line in text:
+ if line.find(mod.upper() + '_DIR') >= 0:
+ words = line.split()
+ return words[2]
+
+ # If it lists a -rpath instruction, use that
+ for line in text:
+ rpath = line.find('-rpath/')
+ if rpath >= 0:
+ return line[rpath + 6:line.find('/lib')]
+
+ # If it lists a -L instruction, use that
+ for line in text:
+ L = line.find('-L/')
+ if L >= 0:
+ return line[L + 2:line.find('/lib')]
+
+ # If it sets the LD_LIBRARY_PATH or CRAY_LD_LIBRARY_PATH, use that
+ for line in text:
+ if line.find('LD_LIBRARY_PATH') >= 0:
+ words = line.split()
+ path = words[2]
+ return path[:path.find('/lib')]
+ # Unable to find module path
+ return None
+
+
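get_path_from_module above applies a series of string heuristics to `modulecmd show` output. The fragment below is a standalone, hedged illustration of the same idea against a fabricated module listing; it is not Spack code and the module text is made up.

    # Fabricated `modulecmd show` output for a hypothetical 'foo' module.
    sample = (
        "setenv FOO_DIR /opt/cray/foo/1.0\n"
        "prepend-path LD_LIBRARY_PATH /opt/cray/foo/1.0/lib\n"
    )

    def guess_prefix(mod, text):
        # Prefer an explicit <MOD>_DIR entry.
        for line in text.split('\n'):
            if (mod.upper() + '_DIR') in line:
                return line.split()[2]
        # Otherwise fall back to LD_LIBRARY_PATH, stripping a trailing /lib.
        for line in text.split('\n'):
            if 'LD_LIBRARY_PATH' in line:
                path = line.split()[2]
                return path[:path.find('/lib')]
        return None

    print(guess_prefix('foo', sample))  # -> /opt/cray/foo/1.0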
def set_compiler_environment_variables(pkg, env):
- assert pkg.spec.concrete
+ assert(pkg.spec.concrete)
+ compiler = pkg.compiler
+ flags = pkg.spec.compiler_flags
+
# Set compiler variables used by CMake and autotools
- assert all(key in pkg.compiler.link_paths for key in ('cc', 'cxx', 'f77', 'fc'))
+ assert all(key in compiler.link_paths for key in (
+ 'cc', 'cxx', 'f77', 'fc'))
# Populate an object with the list of environment modifications
# and return it
- # TODO : add additional kwargs for better diagnostics, like requestor, ttyout, ttyerr, etc.
+ # TODO : add additional kwargs for better diagnostics, like requestor,
+ # ttyout, ttyerr, etc.
link_dir = spack.build_env_path
- env.set('CC', join_path(link_dir, pkg.compiler.link_paths['cc']))
- env.set('CXX', join_path(link_dir, pkg.compiler.link_paths['cxx']))
- env.set('F77', join_path(link_dir, pkg.compiler.link_paths['f77']))
- env.set('FC', join_path(link_dir, pkg.compiler.link_paths['fc']))
# Set SPACK compiler variables so that our wrapper knows what to call
- compiler = pkg.compiler
if compiler.cc:
- env.set('SPACK_CC', compiler.cc)
+ env.set('SPACK_CC', compiler.cc)
+ env.set('CC', join_path(link_dir, compiler.link_paths['cc']))
if compiler.cxx:
env.set('SPACK_CXX', compiler.cxx)
+ env.set('CXX', join_path(link_dir, compiler.link_paths['cxx']))
if compiler.f77:
env.set('SPACK_F77', compiler.f77)
+ env.set('F77', join_path(link_dir, compiler.link_paths['f77']))
if compiler.fc:
env.set('SPACK_FC', compiler.fc)
+ env.set('FC', join_path(link_dir, compiler.link_paths['fc']))
# Set SPACK compiler rpath flags so that our wrapper knows what to use
env.set('SPACK_CC_RPATH_ARG', compiler.cc_rpath_arg)
@@ -144,13 +215,28 @@ def set_compiler_environment_variables(pkg, env):
env.set('SPACK_F77_RPATH_ARG', compiler.f77_rpath_arg)
env.set('SPACK_FC_RPATH_ARG', compiler.fc_rpath_arg)
+ # Add every valid compiler flag to the environment, prefixed with "SPACK_"
+ for flag in spack.spec.FlagMap.valid_compiler_flags():
+ # Concreteness guarantees key safety here
+ if flags[flag] != []:
+ env.set('SPACK_' + flag.upper(), ' '.join(f for f in flags[flag]))
+
env.set('SPACK_COMPILER_SPEC', str(pkg.spec.compiler))
+
+ for mod in compiler.modules:
+ load_module(mod)
+
+ compiler.setup_custom_environment(pkg, env)
+
return env
-def set_build_environment_variables(pkg, env):
+def set_build_environment_variables(pkg, env, dirty=False):
"""
- This ensures a clean install environment when we build packages
+ This ensures a clean install environment when we build packages.
+
+ Arguments:
+ dirty -- skip unsetting the user's environment settings.
"""
# Add spack build environment path with compiler wrappers first in
# the path. We add both spack.env_path, which includes default
@@ -163,36 +249,82 @@ def set_build_environment_variables(pkg, env):
# handled by putting one in the <build_env_path>/case-insensitive
# directory. Add that to the path too.
env_paths = []
- for item in [spack.build_env_path, join_path(spack.build_env_path, pkg.compiler.name)]:
+ compiler_specific = join_path(spack.build_env_path, pkg.compiler.name)
+ for item in [spack.build_env_path, compiler_specific]:
env_paths.append(item)
ci = join_path(item, 'case-insensitive')
if os.path.isdir(ci):
env_paths.append(ci)
+ env_paths = filter_system_paths(env_paths)
+
for item in reversed(env_paths):
env.prepend_path('PATH', item)
env.set_path(SPACK_ENV_PATH, env_paths)
# Prefixes of all of the package's dependencies go in SPACK_DEPENDENCIES
- dep_prefixes = [d.prefix for d in pkg.spec.traverse(root=False)]
+ dep_prefixes = [d.prefix for d in
+ pkg.spec.traverse(root=False, deptype=('build', 'link'))]
+ dep_prefixes = filter_system_paths(dep_prefixes)
env.set_path(SPACK_DEPENDENCIES, dep_prefixes)
- env.set_path('CMAKE_PREFIX_PATH', dep_prefixes) # Add dependencies to CMAKE_PREFIX_PATH
+
+ # These variables control compiler wrapper behavior
+ env.set_path(SPACK_RPATH_DEPS, filter_system_paths([
+ d.prefix for d in get_rpath_deps(pkg)]))
+ env.set_path(SPACK_LINK_DEPS, filter_system_paths([
+ d.prefix for d in pkg.spec.traverse(root=False, deptype=('link'))]))
+
+ # Add dependencies to CMAKE_PREFIX_PATH
+ env.set_path('CMAKE_PREFIX_PATH', dep_prefixes)
# Install prefix
env.set(SPACK_PREFIX, pkg.prefix)
# Install root prefix
- env.set(SPACK_INSTALL, spack.install_path)
-
- # Remove these vars from the environment during build because they
- # can affect how some packages find libraries. We want to make
- # sure that builds never pull in unintended external dependencies.
- env.unset('LD_LIBRARY_PATH')
- env.unset('LD_RUN_PATH')
- env.unset('DYLD_LIBRARY_PATH')
+ env.set(SPACK_INSTALL, spack.store.root)
+
+ # Stuff in here sanitizes the build environment to eliminate
+ # anything the user has set that may interfere.
+ if not dirty:
+ # Remove these vars from the environment during build because they
+ # can affect how some packages find libraries. We want to make
+ # sure that builds never pull in unintended external dependencies.
+ env.unset('LD_LIBRARY_PATH')
+ env.unset('LIBRARY_PATH')
+ env.unset('CPATH')
+ env.unset('LD_RUN_PATH')
+ env.unset('DYLD_LIBRARY_PATH')
+
+ # Remove any macports installs from the PATH. The macports ld can
+ # cause conflicts with the built-in linker on el capitan. Solves
+ # assembler issues, e.g.:
+ # suffix or operands invalid for `movq'"
+ path = get_path('PATH')
+ for p in path:
+ if '/macports/' in p:
+ env.remove_path('PATH', p)
+
+ # Set environment variables if specified for
+ # the given compiler
+ compiler = pkg.compiler
+ environment = compiler.environment
+ if 'set' in environment:
+ env_to_set = environment['set']
+ for key, value in env_to_set.iteritems():
+ env.set('SPACK_ENV_SET_%s' % key, value)
+ env.set('%s' % key, value)
+ # Let shell know which variables to set
+ env_variables = ":".join(env_to_set.keys())
+ env.set('SPACK_ENV_TO_SET', env_variables)
+
+ if compiler.extra_rpaths:
+ extra_rpaths = ':'.join(compiler.extra_rpaths)
+ env.set('SPACK_COMPILER_EXTRA_RPATHS', extra_rpaths)
# Add bin directories from dependencies to the PATH for the build.
- bin_dirs = reversed(filter(os.path.isdir, ['%s/bin' % prefix for prefix in dep_prefixes]))
+ bin_dirs = reversed(filter(os.path.isdir, [
+ '%s/bin' % d.prefix for d in pkg.spec.dependencies(deptype='build')]))
+ bin_dirs = filter_system_bin_paths(bin_dirs)
for item in bin_dirs:
env.prepend_path('PATH', item)
@@ -203,13 +335,14 @@ def set_build_environment_variables(pkg, env):
env.set(SPACK_DEBUG_LOG_DIR, spack.spack_working_dir)
# Add any pkgconfig directories to PKG_CONFIG_PATH
- pkg_config_dirs = []
- for p in dep_prefixes:
- for maybe in ('lib', 'lib64', 'share'):
- pcdir = join_path(p, maybe, 'pkgconfig')
+ for pre in dep_prefixes:
+ for directory in ('lib', 'lib64', 'share'):
+ pcdir = join_path(pre, directory, 'pkgconfig')
if os.path.isdir(pcdir):
- pkg_config_dirs.append(pcdir)
- env.set_path('PKG_CONFIG_PATH', pkg_config_dirs)
+ env.prepend_path('PKG_CONFIG_PATH', pcdir)
+
+ if pkg.architecture.target.module_name:
+ load_module(pkg.architecture.target.module_name)
return env
@@ -218,7 +351,7 @@ def set_module_variables_for_package(pkg, module):
"""Populate the module scope of install() with some useful functions.
This makes things easier for package writers.
"""
- # number of jobs spack will to build with.
+ # number of jobs spack will build with.
jobs = multiprocessing.cpu_count()
if not pkg.parallel:
jobs = 1
@@ -229,8 +362,9 @@ def set_module_variables_for_package(pkg, module):
m.make_jobs = jobs
# TODO: make these build deps that can be installed if not found.
- m.make = MakeExecutable('make', jobs)
+ m.make = MakeExecutable('make', jobs)
m.gmake = MakeExecutable('gmake', jobs)
+ m.scons = MakeExecutable('scons', jobs)
# easy shortcut to os.environ
m.env = os.environ
@@ -239,79 +373,104 @@ def set_module_variables_for_package(pkg, module):
# Don't use which for this; we want to find it in the current dir.
m.configure = Executable('./configure')
- # TODO: shouldn't really use "which" here. Consider adding notion
- # TODO: of build dependencies, as opposed to link dependencies.
- # TODO: Currently, everything is a link dependency, but tools like
- # TODO: this shouldn't be.
m.cmake = Executable('cmake')
+ m.ctest = Executable('ctest')
- # standard CMake arguments
- m.std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=%s' % pkg.prefix,
- '-DCMAKE_BUILD_TYPE=RelWithDebInfo']
- if platform.mac_ver()[0]:
- m.std_cmake_args.append('-DCMAKE_FIND_FRAMEWORK=LAST')
-
- # Set up CMake rpath
- m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH=FALSE')
- m.std_cmake_args.append('-DCMAKE_INSTALL_RPATH=%s' % ":".join(get_rpaths(pkg)))
+ # Standard CMake arguments
+ m.std_cmake_args = spack.CMakePackage._std_args(pkg)
# Put spack compiler paths in module scope.
link_dir = spack.build_env_path
- m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
+ m.spack_cc = join_path(link_dir, pkg.compiler.link_paths['cc'])
m.spack_cxx = join_path(link_dir, pkg.compiler.link_paths['cxx'])
m.spack_f77 = join_path(link_dir, pkg.compiler.link_paths['f77'])
- m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
+ m.spack_fc = join_path(link_dir, pkg.compiler.link_paths['fc'])
# Emulate some shell commands for convenience
- m.pwd = os.getcwd
- m.cd = os.chdir
- m.mkdir = os.mkdir
- m.makedirs = os.makedirs
- m.remove = os.remove
- m.removedirs = os.removedirs
- m.symlink = os.symlink
-
- m.mkdirp = mkdirp
- m.install = install
+ m.pwd = os.getcwd
+ m.cd = os.chdir
+ m.mkdir = os.mkdir
+ m.makedirs = os.makedirs
+ m.remove = os.remove
+ m.removedirs = os.removedirs
+ m.symlink = os.symlink
+
+ m.mkdirp = mkdirp
+ m.install = install
m.install_tree = install_tree
- m.rmtree = shutil.rmtree
- m.move = shutil.move
+ m.rmtree = shutil.rmtree
+ m.move = shutil.move
# Useful directories within the prefix are encapsulated in
# a Prefix object.
- m.prefix = pkg.prefix
+ m.prefix = pkg.prefix
# Platform-specific library suffix.
m.dso_suffix = dso_suffix
+def get_rpath_deps(pkg):
+ """Return immediate or transitive RPATHs depending on the package."""
+ if pkg.transitive_rpaths:
+ return [d for d in pkg.spec.traverse(root=False, deptype=('link'))]
+ else:
+ return pkg.spec.dependencies(deptype='link')
+
+
def get_rpaths(pkg):
"""Get a list of all the rpaths for a package."""
rpaths = [pkg.prefix.lib, pkg.prefix.lib64]
- rpaths.extend(d.prefix.lib for d in pkg.spec.dependencies.values()
+ deps = get_rpath_deps(pkg)
+ rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in pkg.spec.dependencies.values()
+ rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
+ # Second module is our compiler mod name. We use that to get rpaths from
+ # module show output.
+ if pkg.compiler.modules and len(pkg.compiler.modules) > 1:
+ rpaths.append(get_path_from_module(pkg.compiler.modules[1]))
return rpaths
+def get_std_cmake_args(pkg):
+ """Returns the list of standard arguments that would be used if this
+ package was a CMakePackage instance.
+
+ :param pkg: pkg under consideration
+
+ :return: list of arguments for cmake
+ """
+ return spack.CMakePackage._std_args(pkg)
+
+
def parent_class_modules(cls):
- """Get list of super class modules that are all descend from spack.Package"""
- if not issubclass(cls, spack.Package) or issubclass(spack.Package, cls):
+ """
+ Get the list of superclass modules that all descend from spack.Package
+ """
+ if (not issubclass(cls, spack.package.Package) or
+ issubclass(spack.package.Package, cls)):
return []
result = []
module = sys.modules.get(cls.__module__)
if module:
- result = [ module ]
+ result = [module]
for c in cls.__bases__:
result.extend(parent_class_modules(c))
return result
-def setup_package(pkg):
+def load_external_modules(pkg):
+ """ traverse the spec list and find any specs that have external modules.
+ """
+ for dep in list(pkg.spec.traverse()):
+ if dep.external_module:
+ load_module(dep.external_module)
+
+
+def setup_package(pkg, dirty=False):
"""Execute all environment setup routines."""
spack_env = EnvironmentModifications()
- run_env = EnvironmentModifications()
+ run_env = EnvironmentModifications()
# Before proceeding, ensure that specs and packages are consistent
#
@@ -327,14 +486,16 @@ def setup_package(pkg):
# throwaway environment, but it is kind of dirty.
#
# TODO: Think about how to avoid this fix and do something cleaner.
- for s in pkg.spec.traverse(): s.package.spec = s
+ for s in pkg.spec.traverse():
+ s.package.spec = s
set_compiler_environment_variables(pkg, spack_env)
- set_build_environment_variables(pkg, spack_env)
-
+ set_build_environment_variables(pkg, spack_env, dirty)
+ pkg.architecture.platform.setup_platform_environment(pkg, spack_env)
+ load_external_modules(pkg)
# traverse in postorder so package can use vars from its dependencies
spec = pkg.spec
- for dspec in pkg.spec.traverse(order='post', root=False):
+ for dspec in pkg.spec.traverse(order='post', root=False, deptype='build'):
# If a user makes their own package repo, e.g.
# spack.repos.mystuff.libelf.Libelf, and they inherit from
# an existing class like spack.repos.original.libelf.Libelf,
@@ -359,16 +520,15 @@ def setup_package(pkg):
spack_env.apply_modifications()
-def fork(pkg, function):
+def fork(pkg, function, dirty=False):
"""Fork a child process to do part of a spack build.
- Arguments:
+ :param pkg: pkg whose environment we should set up the forked process for.
+ :param function: arg-less function to run in the child process.
+ :param dirty: If True, do NOT clean the environment before building.
- pkg -- pkg whose environemnt we should set up the
- forked process for.
- function -- arg-less function to run in the child process.
+ Usage::
- Usage:
def child_fun():
# do stuff
build_env.fork(pkg, child_fun)
@@ -383,40 +543,175 @@ def fork(pkg, function):
well. If things go well, the child exits and the parent
carries on.
"""
- try:
- pid = os.fork()
- except OSError as e:
- raise InstallError("Unable to fork build process: %s" % e)
-
- if pid == 0:
- # Give the child process the package's build environment.
- setup_package(pkg)
+ def child_execution(child_connection, input_stream):
try:
- # call the forked function.
- function()
+ setup_package(pkg, dirty=dirty)
+ function(input_stream)
+ child_connection.send(None)
+ except StopIteration as e:
+ # StopIteration is used to stop installations
+ # before the final stage, mainly for debug purposes
+ tty.msg(e.message)
+ child_connection.send(None)
+ except:
+ # catch ANYTHING that goes wrong in the child process
+ exc_type, exc, tb = sys.exc_info()
- # Use os._exit here to avoid raising a SystemExit exception,
- # which interferes with unit tests.
- os._exit(0)
+ # Need to unwind the traceback in the child because traceback
+ # objects can't be sent to the parent.
+ tb_string = traceback.format_exc()
- except spack.error.SpackError as e:
- e.die()
+ # build up some context from the offending package so we can
+ # show that, too.
+ package_context = get_package_context(tb)
- except:
- # Child doesn't raise or return to main spack code.
- # Just runs default exception handler and exits.
- sys.excepthook(*sys.exc_info())
- os._exit(1)
+ build_log = None
+ if hasattr(pkg, 'log_path'):
+ build_log = pkg.log_path
- else:
- # Parent process just waits for the child to complete. If the
- # child exited badly, assume it already printed an appropriate
- # message. Just make the parent exit with an error code.
- pid, returncode = os.waitpid(pid, 0)
- if returncode != 0:
- raise InstallError("Installation process had nonzero exit code.".format(str(returncode)))
+ # make a pickleable exception to send to parent.
+ msg = "%s: %s" % (str(exc_type.__name__), str(exc))
+
+ ce = ChildError(msg, tb_string, build_log, package_context)
+ child_connection.send(ce)
+
+ finally:
+ child_connection.close()
+
+ parent_connection, child_connection = multiprocessing.Pipe()
+ try:
+ # Forward sys.stdin to be able to activate / deactivate
+ # verbosity pressing a key at run-time
+ input_stream = lang.duplicate_stream(sys.stdin)
+ p = multiprocessing.Process(
+ target=child_execution,
+ args=(child_connection, input_stream)
+ )
+ p.start()
+ finally:
+ # Close the input stream in the parent process
+ input_stream.close()
+ child_exc = parent_connection.recv()
+ p.join()
+
+ if child_exc is not None:
+ raise child_exc
+
+
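The rewritten fork() above replaces os.fork with a multiprocessing.Process and a Pipe so that failures in the child can be reported cleanly in the parent. Here is a standalone sketch of that parent/child error-forwarding pattern (not Spack code); the real build work is stood in for by a deliberately failing function.

    import multiprocessing
    import traceback

    def do_build():
        # Stand-in for the real build function; simulate a failure.
        raise RuntimeError('build failed')

    def child(conn):
        try:
            do_build()
            conn.send(None)               # success: nothing to report
        except Exception as exc:
            # Format the traceback here; traceback objects don't pickle.
            conn.send((str(exc), traceback.format_exc()))
        finally:
            conn.close()

    if __name__ == '__main__':
        parent_conn, child_conn = multiprocessing.Pipe()
        p = multiprocessing.Process(target=child, args=(child_conn,))
        p.start()
        result = parent_conn.recv()
        p.join()
        if result is not None:
            msg, tb_string = result
            raise RuntimeError('%s\n%s' % (msg, tb_string))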
+def get_package_context(traceback):
+ """Return some context for an error message when the build fails.
+
+ Args:
+ traceback -- A traceback from some exception raised during install.
+
+ This function inspects the stack to find where we failed in the
+ package file, and it adds detailed context to the long_message
+ from there.
+
+ """
+ def make_stack(tb, stack=None):
+ """Tracebacks come out of the system in caller -> callee order. Return
+ an array in callee -> caller order so we can traverse it."""
+ if stack is None:
+ stack = []
+ if tb is not None:
+ make_stack(tb.tb_next, stack)
+ stack.append(tb)
+ return stack
+
+ stack = make_stack(traceback)
+
+ for tb in stack:
+ frame = tb.tb_frame
+ if 'self' in frame.f_locals:
+ # Find the first proper subclass of PackageBase.
+ obj = frame.f_locals['self']
+ if isinstance(obj, spack.package.PackageBase):
+ break
+
+ # we found obj, the Package implementation we care about.
+ # point out the location in the install method where we failed.
+ lines = []
+ lines.append("%s:%d, in %s:" % (
+ inspect.getfile(frame.f_code), frame.f_lineno, frame.f_code.co_name
+ ))
+
+ # Build a message showing context in the install method.
+ sourcelines, start = inspect.getsourcelines(frame)
+ for i, line in enumerate(sourcelines):
+ mark = ">> " if start + i == frame.f_lineno else " "
+ lines.append(" %s%-5d%s" % (mark, start + i, line.rstrip()))
+
+ return lines
class InstallError(spack.error.SpackError):
- """Raised when a package fails to install"""
+ """Raised by packages when a package fails to install"""
+
+
+class ChildError(spack.error.SpackError):
+ """Special exception class for wrapping exceptions from child processes
+ in Spack's build environment.
+
+ The main features of a ChildError are:
+
+ 1. They're serializable, so when a child build fails, we can send one
+ of these to the parent and let the parent report what happened.
+
+ 2. They have a ``traceback`` field containing a traceback generated
+ on the child immediately after failure. Spack will print this on
+ failure in lieu of trying to run sys.excepthook on the parent
+ process, so users will see the correct stack trace from a child.
+
+ 3. They also contain package_context, which shows source code context
+ in the Package implementation where the error happened. To get
+ this, Spack searches the stack trace for the deepest frame where
+ ``self`` is in scope and is an instance of PackageBase. This will
+ generally find a useful spot in the ``package.py`` file.
+
+ The long_message of a ChildError displays all this stuff to the user,
+ and SpackError handles displaying the special traceback if we're in
+ debug mode with spack -d.
+
+ """
+ def __init__(self, msg, traceback_string, build_log, package_context):
+ super(ChildError, self).__init__(msg)
+ self.traceback = traceback_string
+ self.build_log = build_log
+ self.package_context = package_context
+
+ @property
+ def long_message(self):
+ msg = self._long_message if self._long_message else ''
+
+ if self.package_context:
+ if msg:
+ msg += "\n\n"
+ msg += '\n'.join(self.package_context)
+
+ if msg:
+ msg += "\n\n"
+
+ if self.build_log:
+ msg += "See build log for details:\n"
+ msg += " %s" % self.build_log
+
+ return msg
+
+ def __reduce__(self):
+ """__reduce__ is used to serialize (pickle) ChildErrors.
+
+ Return a function to reconstruct a ChildError, along with the
+ salient properties we'll need.
+ """
+ return _make_child_error, (
+ self.message,
+ self.traceback,
+ self.build_log,
+ self.package_context)
+
+
+def _make_child_error(msg, traceback, build_log, package_context):
+ """Used by __reduce__ in ChildError to reconstruct pickled errors."""
+ return ChildError(msg, traceback, build_log, package_context)
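ChildError relies on __reduce__ so it can be pickled across the Pipe created in fork(). A minimal, self-contained sketch of that pickling pattern follows; it is not Spack code, and the class and helper names are made up.

    import pickle

    class PickleableError(Exception):
        def __init__(self, msg, details):
            super(PickleableError, self).__init__(msg)
            self.details = details

        def __reduce__(self):
            # Return a callable plus the arguments needed to rebuild it.
            return _rebuild, (self.args[0], self.details)

    def _rebuild(msg, details):
        return PickleableError(msg, details)

    err = pickle.loads(pickle.dumps(PickleableError('install failed', 'log')))
    print('%s: %s' % (err, err.details))  # -> install failed: log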
diff --git a/lib/spack/spack/build_systems/__init__.py b/lib/spack/spack/build_systems/__init__.py
new file mode 100644
index 0000000000..ed1ec23bca
--- /dev/null
+++ b/lib/spack/spack/build_systems/__init__.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
diff --git a/lib/spack/spack/build_systems/autotools.py b/lib/spack/spack/build_systems/autotools.py
new file mode 100644
index 0000000000..78a4df5e11
--- /dev/null
+++ b/lib/spack/spack/build_systems/autotools.py
@@ -0,0 +1,203 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import inspect
+import os
+import os.path
+import shutil
+from subprocess import PIPE
+from subprocess import check_call
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import working_dir
+from spack.package import PackageBase
+
+
+class AutotoolsPackage(PackageBase):
+ """Specialized class for packages that are built using GNU Autotools
+
+ This class provides four phases that can be overridden:
+
+ * autoreconf
+ * configure
+ * build
+ * install
+
+ They all have sensible defaults and for many packages the only thing
+ necessary will be to override ``configure_args``
+
+ Additionally, you may specify make targets for build and install
+ phases by overriding ``build_targets`` and ``install_targets``
+ """
+ phases = ['autoreconf', 'configure', 'build', 'install']
+ # To be used in UI queries that need to know which
+ # build-system class we are using
+ build_system_class = 'AutotoolsPackage'
+ patch_config_guess = True
+
+ build_targets = []
+ install_targets = ['install']
+
+ def do_patch_config_guess(self):
+ """Some packages ship with an older config.guess and need to have
+ this updated when installed on a newer architecture."""
+
+ my_config_guess = None
+ config_guess = None
+ if os.path.exists('config.guess'):
+ # First search the top-level source directory
+ my_config_guess = 'config.guess'
+ else:
+ # Then search in all sub directories.
+ # We would like to use AC_CONFIG_AUX_DIR, but not all packages
+ # ship with their configure.in or configure.ac.
+ d = '.'
+ dirs = [os.path.join(d, o) for o in os.listdir(d)
+ if os.path.isdir(os.path.join(d, o))]
+ for dirname in dirs:
+ path = os.path.join(dirname, 'config.guess')
+ if os.path.exists(path):
+ my_config_guess = path
+
+ if my_config_guess is not None:
+ try:
+ check_call([my_config_guess], stdout=PIPE, stderr=PIPE)
+ # The package's config.guess already runs OK, so just use it
+ return True
+ except:
+ pass
+ else:
+ return True
+
+ # Look for a spack-installed automake package
+ if 'automake' in self.spec:
+ automake_path = os.path.join(self.spec['automake'].prefix, 'share',
+ 'automake-' +
+ str(self.spec['automake'].version))
+ path = os.path.join(automake_path, 'config.guess')
+ if os.path.exists(path):
+ config_guess = path
+ if config_guess is not None:
+ try:
+ check_call([config_guess], stdout=PIPE, stderr=PIPE)
+ shutil.copyfile(config_guess, my_config_guess)
+ return True
+ except:
+ pass
+
+ # Look for the system's config.guess
+ if os.path.exists('/usr/share'):
+ automake_dir = [s for s in os.listdir('/usr/share') if
+ "automake" in s]
+ if automake_dir:
+ automake_path = os.path.join('/usr/share', automake_dir[0])
+ path = os.path.join(automake_path, 'config.guess')
+ if os.path.exists(path):
+ config_guess = path
+ if config_guess is not None:
+ try:
+ check_call([config_guess], stdout=PIPE, stderr=PIPE)
+ shutil.copyfile(config_guess, my_config_guess)
+ return True
+ except:
+ pass
+
+ return False
+
+ def build_directory(self):
+ """Override to provide another place to build the package"""
+ return self.stage.source_path
+
+ def patch(self):
+ """Perform any required patches."""
+
+ if self.patch_config_guess and self.spec.satisfies(
+ 'arch=linux-rhel7-ppc64le'):
+ if not self.do_patch_config_guess():
+ raise RuntimeError('Failed to find suitable config.guess')
+
+ def autoreconf(self, spec, prefix):
+ """Not needed usually, configure should be already there"""
+ pass
+
+ @PackageBase.sanity_check('autoreconf')
+ def is_configure_or_die(self):
+ """Checks the presence of a ``configure`` file after the
+ autoreconf phase"""
+ with working_dir(self.build_directory()):
+ if not os.path.exists('configure'):
+ raise RuntimeError(
+ 'configure script not found in {0}'.format(os.getcwd()))
+
+ def configure_args(self):
+ """Method to be overridden. Should return an iterable containing
+ all the arguments that must be passed to configure, except ``--prefix``
+ """
+ return []
+
+ def configure(self, spec, prefix):
+ """Runs configure with the arguments specified in ``configure_args``
+ and an appropriately set prefix
+ """
+ options = ['--prefix={0}'.format(prefix)] + self.configure_args()
+
+ with working_dir(self.build_directory()):
+ inspect.getmodule(self).configure(*options)
+
+ def build(self, spec, prefix):
+ """Make the build targets"""
+ with working_dir(self.build_directory()):
+ inspect.getmodule(self).make(*self.build_targets)
+
+ def install(self, spec, prefix):
+ """Make the install targets"""
+ with working_dir(self.build_directory()):
+ inspect.getmodule(self).make(*self.install_targets)
+
+ @PackageBase.sanity_check('build')
+ @PackageBase.on_package_attributes(run_tests=True)
+ def _run_default_function(self):
+ """This function is run after build if ``self.run_tests == True``
+
+ It will search for a method named ``check`` and run it. A sensible
+ default is provided in the base class.
+ """
+ try:
+ fn = getattr(self, 'check')
+ tty.msg('Trying default sanity checks [check]')
+ fn()
+ except AttributeError:
+ tty.msg('Skipping default sanity checks [method `check` not implemented]') # NOQA: ignore=E501
+
+ def check(self):
+ """Default test: search the Makefile for targets ``test`` and ``check``
+ and run them if found.
+ """
+ with working_dir(self.build_directory()):
+ self._if_make_target_execute('test')
+ self._if_make_target_execute('check')
+
+ # Check that self.prefix is there after installation
+ PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
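A hypothetical package using the new AutotoolsPackage base class might look like the sketch below; the package name, URL, checksum and variant are illustrative only, and ``from spack import *`` is assumed to export AutotoolsPackage like the other base classes.

    from spack import *


    class Libfoo(AutotoolsPackage):
        """Hypothetical autotools-based library (illustrative only)."""

        homepage = "https://example.com/libfoo"
        url      = "https://example.com/libfoo-1.0.tar.gz"

        version('1.0', 'abcdef0123456789abcdef0123456789')

        variant('shared', default=True, description='Build shared libraries')

        # Usually only configure_args needs overriding; autoreconf,
        # configure, build and install all have working defaults.
        def configure_args(self):
            if '+shared' in self.spec:
                return ['--enable-shared']
            return ['--disable-shared']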
diff --git a/lib/spack/spack/build_systems/cmake.py b/lib/spack/spack/build_systems/cmake.py
new file mode 100644
index 0000000000..61d45784e8
--- /dev/null
+++ b/lib/spack/spack/build_systems/cmake.py
@@ -0,0 +1,152 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import inspect
+import platform
+
+import llnl.util.tty as tty
+import spack.build_environment
+from llnl.util.filesystem import working_dir, join_path
+from spack.directives import depends_on
+from spack.package import PackageBase
+
+
+class CMakePackage(PackageBase):
+ """Specialized class for packages that are built using CMake
+
+ This class provides three phases that can be overridden:
+
+ * cmake
+ * build
+ * install
+
+ They all have sensible defaults and for many packages the only thing
+ necessary will be to override ``cmake_args``
+
+ Additionally, you may specify make targets for build and install
+ phases by overriding ``build_targets`` and ``install_targets``
+ """
+ phases = ['cmake', 'build', 'install']
+ # To be used in UI queries that need to know which
+ # build-system class we are using
+ build_system_class = 'CMakePackage'
+
+ build_targets = []
+ install_targets = ['install']
+
+ depends_on('cmake', type='build')
+
+ def build_type(self):
+ """Override to provide the correct build_type in case a complex
+ logic is needed
+ """
+ return 'RelWithDebInfo'
+
+ def root_cmakelists_dir(self):
+ """Directory where to find the root CMakeLists.txt"""
+ return self.stage.source_path
+
+ @property
+ def std_cmake_args(self):
+ """Standard cmake arguments provided as a property for
+ convenience of package writers
+ """
+ # standard CMake arguments
+ return CMakePackage._std_args(self)
+
+ @staticmethod
+ def _std_args(pkg):
+ """Computes the standard cmake arguments for a generic package"""
+ try:
+ build_type = pkg.build_type()
+ except AttributeError:
+ build_type = 'RelWithDebInfo'
+
+ args = ['-DCMAKE_INSTALL_PREFIX:PATH={0}'.format(pkg.prefix),
+ '-DCMAKE_BUILD_TYPE:STRING={0}'.format(build_type),
+ '-DCMAKE_VERBOSE_MAKEFILE:BOOL=ON']
+ if platform.mac_ver()[0]:
+ args.append('-DCMAKE_FIND_FRAMEWORK:STRING=LAST')
+
+ # Set up CMake rpath
+ args.append('-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=FALSE')
+ rpaths = ':'.join(spack.build_environment.get_rpaths(pkg))
+ args.append('-DCMAKE_INSTALL_RPATH:STRING={0}'.format(rpaths))
+ return args
+
+ def build_directory(self):
+ """Override to provide another place to build the package"""
+ return join_path(self.stage.source_path, 'spack-build')
+
+ def cmake_args(self):
+ """Method to be overridden. Should return an iterable containing
+ all the arguments that must be passed to configure, except:
+
+ * CMAKE_INSTALL_PREFIX
+ * CMAKE_BUILD_TYPE
+ """
+ return []
+
+ def cmake(self, spec, prefix):
+ """Run cmake in the build directory"""
+ options = [self.root_cmakelists_dir()] + self.std_cmake_args + \
+ self.cmake_args()
+ with working_dir(self.build_directory(), create=True):
+ inspect.getmodule(self).cmake(*options)
+
+ def build(self, spec, prefix):
+ """Make the build targets"""
+ with working_dir(self.build_directory()):
+ inspect.getmodule(self).make(*self.build_targets)
+
+ def install(self, spec, prefix):
+ """Make the install targets"""
+ with working_dir(self.build_directory()):
+ inspect.getmodule(self).make(*self.install_targets)
+
+ @PackageBase.sanity_check('build')
+ @PackageBase.on_package_attributes(run_tests=True)
+ def _run_default_function(self):
+ """This function is run after build if ``self.run_tests == True``
+
+ It will search for a method named ``check`` and run it. A sensible
+ default is provided in the base class.
+ """
+ try:
+ fn = getattr(self, 'check')
+ tty.msg('Trying default build sanity checks [check]')
+ fn()
+ except AttributeError:
+ tty.msg('Skipping default build sanity checks [method `check` not implemented]') # NOQA: ignore=E501
+
+ def check(self):
+ """Default test: search the Makefile for the target ``test``
+ and run it if found.
+ """
+ with working_dir(self.build_directory()):
+ self._if_make_target_execute('test')
+
+ # Check that self.prefix is there after installation
+ PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
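Similarly, a hypothetical CMake-based package could be written against CMakePackage as sketched below; names, versions and options are made up.

    from spack import *


    class Libbar(CMakePackage):
        """Hypothetical CMake-based library (illustrative only)."""

        homepage = "https://example.com/libbar"
        url      = "https://example.com/libbar-2.1.tar.gz"

        version('2.1', '0123456789abcdef0123456789abcdef')

        depends_on('zlib')

        # std_cmake_args (install prefix, build type, rpath handling) is
        # added automatically; only package-specific options go here.
        def cmake_args(self):
            return ['-DBAR_ENABLE_TESTS:BOOL=OFF',
                    '-DZLIB_ROOT:PATH=%s' % self.spec['zlib'].prefix]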
diff --git a/lib/spack/spack/build_systems/makefile.py b/lib/spack/spack/build_systems/makefile.py
new file mode 100644
index 0000000000..a56f316109
--- /dev/null
+++ b/lib/spack/spack/build_systems/makefile.py
@@ -0,0 +1,72 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import inspect
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import working_dir
+from spack.package import PackageBase
+
+
+class MakefilePackage(PackageBase):
+ """Specialized class for packages that are built using editable Makefiles
+
+ This class provides three phases that can be overridden:
+
+ * edit
+ * build
+ * install
+
+ It is necessary to override the 'edit' phase, while 'build' and 'install'
+ have sensible defaults.
+ """
+ phases = ['edit', 'build', 'install']
+ # To be used in UI queries that need to know which
+ # build-system class we are using
+ build_system_class = 'MakefilePackage'
+
+ build_targets = []
+ install_targets = ['install']
+
+ def build_directory(self):
+ """Directory where the main Makefile is located"""
+ return self.stage.source_path
+
+ def edit(self, spec, prefix):
+ """This phase cannot be defaulted for obvious reasons..."""
+ tty.msg('Using default implementation: skipping edit phase.')
+
+ def build(self, spec, prefix):
+ """Make the build targets"""
+ with working_dir(self.build_directory()):
+ inspect.getmodule(self).make(*self.build_targets)
+
+ def install(self, spec, prefix):
+ """Make the install targets"""
+ with working_dir(self.build_directory()):
+ inspect.getmodule(self).make(*self.install_targets)
+
+ # Check that self.prefix is there after installation
+ PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
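A hypothetical package driven by an editable Makefile could use MakefilePackage as sketched here; the package, checksum and Makefile variables are invented, and FileFilter is assumed to be available via ``from spack import *``.

    from spack import *


    class Libbaz(MakefilePackage):
        """Hypothetical Makefile-based package (illustrative only)."""

        homepage = "https://example.com/libbaz"
        url      = "https://example.com/libbaz-0.3.tar.gz"

        version('0.3', 'fedcba9876543210fedcba9876543210')

        # Only 'edit' must be overridden; build and install default to
        # `make` and `make install` in build_directory().
        def edit(self, spec, prefix):
            makefile = FileFilter('Makefile')
            makefile.filter(r'^PREFIX\s*=.*', 'PREFIX = %s' % prefix)
            makefile.filter(r'^CC\s*=.*', 'CC = cc')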
diff --git a/lib/spack/spack/build_systems/python.py b/lib/spack/spack/build_systems/python.py
new file mode 100644
index 0000000000..d21c291ae6
--- /dev/null
+++ b/lib/spack/spack/build_systems/python.py
@@ -0,0 +1,309 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import inspect
+
+from spack.directives import extends
+from spack.package import PackageBase
+
+from llnl.util.filesystem import working_dir
+
+
+class PythonPackage(PackageBase):
+ """Specialized class for packages that are built using Python
+ setup.py files
+
+ This class provides the following phases that can be overridden:
+
+ * build
+ * build_py
+ * build_ext
+ * build_clib
+ * build_scripts
+ * clean
+ * install
+ * install_lib
+ * install_headers
+ * install_scripts
+ * install_data
+ * sdist
+ * register
+ * bdist
+ * bdist_dumb
+ * bdist_rpm
+ * bdist_wininst
+ * upload
+ * check
+
+ These are all standard setup.py commands and can be found by running:
+
+ .. code-block:: console
+
+ $ python setup.py --help-commands
+
+ By default, only the 'build' and 'install' phases are run, but if you
+ need to run more phases, simply modify your ``phases`` list like so:
+
+ .. code-block:: python
+
+ phases = ['build_ext', 'install', 'bdist']
+
+ Each phase provides a function <phase> that runs:
+
+ .. code-block:: console
+
+ $ python --no-user-cfg setup.py <phase>
+
+ Each phase also has a <phase_args> function that can pass arguments to
+ this call. All of these functions are empty except for the ``install_args``
+ function, which passes ``--prefix=/path/to/installation/directory``.
+
+ If you need to run a phase which is not a standard setup.py command,
+ you'll need to define a function for it like so:
+
+ .. code-block:: python
+
+ def configure(self, spec, prefix):
+ self.setup_py('configure')
+ """
+ # Default phases
+ phases = ['build', 'install']
+
+ # To be used in UI queries that need to know which
+ # build-system class we are using
+ build_system_class = 'PythonPackage'
+
+ extends('python')
+
+ def setup_file(self, spec, prefix):
+ """Returns the name of the setup file to use."""
+ return 'setup.py'
+
+ def build_directory(self):
+ """The directory containing the ``setup.py`` file."""
+ return self.stage.source_path
+
+ def python(self, *args):
+ inspect.getmodule(self).python(*args)
+
+ def setup_py(self, *args):
+ setup = self.setup_file(self.spec, self.prefix)
+
+ with working_dir(self.build_directory()):
+ self.python(setup, '--no-user-cfg', *args)
+
+ # The following phases and their descriptions come from:
+ # $ python setup.py --help-commands
+ # Only standard commands are included here, but some packages
+ # define extra commands as well
+
+ def build(self, spec, prefix):
+ """Build everything needed to install."""
+ args = self.build_args(spec, prefix)
+
+ self.setup_py('build', *args)
+
+ def build_args(self, spec, prefix):
+ """Arguments to pass to build."""
+ return []
+
+ def build_py(self, spec, prefix):
+ '''"Build" pure Python modules (copy to build directory).'''
+ args = self.build_py_args(spec, prefix)
+
+ self.setup_py('build_py', *args)
+
+ def build_py_args(self, spec, prefix):
+ """Arguments to pass to build_py."""
+ return []
+
+ def build_ext(self, spec, prefix):
+ """Build C/C++ extensions (compile/link to build directory)."""
+ args = self.build_ext_args(spec, prefix)
+
+ self.setup_py('build_ext', *args)
+
+ def build_ext_args(self, spec, prefix):
+ """Arguments to pass to build_ext."""
+ return []
+
+ def build_clib(self, spec, prefix):
+ """Build C/C++ libraries used by Python extensions."""
+ args = self.build_clib_args(spec, prefix)
+
+ self.setup_py('build_clib', *args)
+
+ def build_clib_args(self, spec, prefix):
+ """Arguments to pass to build_clib."""
+ return []
+
+ def build_scripts(self, spec, prefix):
+ '''"Build" scripts (copy and fixup #! line).'''
+ args = self.build_scripts_args(spec, prefix)
+
+ self.setup_py('build_scripts', *args)
+
+ def clean(self, spec, prefix):
+ """Clean up temporary files from 'build' command."""
+ args = self.clean_args(spec, prefix)
+
+ self.setup_py('clean', *args)
+
+ def clean_args(self, spec, prefix):
+ """Arguments to pass to clean."""
+ return []
+
+ def install(self, spec, prefix):
+ """Install everything from build directory."""
+ args = self.install_args(spec, prefix)
+
+ self.setup_py('install', *args)
+
+ def install_args(self, spec, prefix):
+ """Arguments to pass to install."""
+ return ['--prefix={0}'.format(prefix)]
+
+ def install_lib(self, spec, prefix):
+ """Install all Python modules (extensions and pure Python)."""
+ args = self.install_lib_args(spec, prefix)
+
+ self.setup_py('install_lib', *args)
+
+ def install_lib_args(self, spec, prefix):
+ """Arguments to pass to install_lib."""
+ return []
+
+ def install_headers(self, spec, prefix):
+ """Install C/C++ header files."""
+ args = self.install_headers_args(spec, prefix)
+
+ self.setup_py('install_headers', *args)
+
+ def install_headers_args(self, spec, prefix):
+ """Arguments to pass to install_headers."""
+ return []
+
+ def install_scripts(self, spec, prefix):
+ """Install scripts (Python or otherwise)."""
+ args = self.install_scripts_args(spec, prefix)
+
+ self.setup_py('install_scripts', *args)
+
+ def install_scripts_args(self, spec, prefix):
+ """Arguments to pass to install_scripts."""
+ return []
+
+ def install_data(self, spec, prefix):
+ """Install data files."""
+ args = self.install_data_args(spec, prefix)
+
+ self.setup_py('install_data', *args)
+
+ def install_data_args(self, spec, prefix):
+ """Arguments to pass to install_data."""
+ return []
+
+ def sdist(self, spec, prefix):
+ """Create a source distribution (tarball, zip file, etc.)."""
+ args = self.sdist_args(spec, prefix)
+
+ self.setup_py('sdist', *args)
+
+ def sdist_args(self, spec, prefix):
+ """Arguments to pass to sdist."""
+ return []
+
+ def register(self, spec, prefix):
+ """Register the distribution with the Python package index."""
+ args = self.register_args(spec, prefix)
+
+ self.setup_py('register', *args)
+
+ def register_args(self, spec, prefix):
+ """Arguments to pass to register."""
+ return []
+
+ def bdist(self, spec, prefix):
+ """Create a built (binary) distribution."""
+ args = self.bdist_args(spec, prefix)
+
+ self.setup_py('bdist', *args)
+
+ def bdist_args(self, spec, prefix):
+ """Arguments to pass to bdist."""
+ return []
+
+ def bdist_dumb(self, spec, prefix):
+ '''Create a "dumb" built distribution.'''
+ args = self.bdist_dumb_args(spec, prefix)
+
+ self.setup_py('bdist_dumb', *args)
+
+ def bdist_dumb_args(self, spec, prefix):
+ """Arguments to pass to bdist_dumb."""
+ return []
+
+ def bdist_rpm(self, spec, prefix):
+ """Create an RPM distribution."""
+ args = self.bdist_rpm_args(spec, prefix)
+
+ self.setup_py('bdist_rpm', *args)
+
+ def bdist_rpm_args(self, spec, prefix):
+ """Arguments to pass to bdist_rpm."""
+ return []
+
+ def bdist_wininst(self, spec, prefix):
+ """Create an executable installer for MS Windows."""
+ args = self.bdist_wininst_args(spec, prefix)
+
+ self.setup_py('bdist_wininst', *args)
+
+ def bdist_wininst_args(self, spec, prefix):
+ """Arguments to pass to bdist_wininst."""
+ return []
+
+ def upload(self, spec, prefix):
+ """Upload binary package to PyPI."""
+ args = self.upload_args(spec, prefix)
+
+ self.setup_py('upload', *args)
+
+ def upload_args(self, spec, prefix):
+ """Arguments to pass to upload."""
+ return []
+
+ def check(self, spec, prefix):
+ """Perform some checks on the package."""
+ args = self.check_args(spec, prefix)
+
+ self.setup_py('check', *args)
+
+ def check_args(self, spec, prefix):
+ """Arguments to pass to check."""
+ return []
+
+ # Check that self.prefix is there after installation
+ PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
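A hypothetical pure-Python package built on PythonPackage needs little more than metadata, as sketched below (name, URL and checksum are made up).

    from spack import *


    class PyQuux(PythonPackage):
        """Hypothetical pure-Python package (illustrative only)."""

        homepage = "https://example.com/quux"
        url      = "https://example.com/quux-1.4.tar.gz"

        version('1.4', '89abcdef0123456789abcdef01234567')

        depends_on('py-setuptools', type='build')

        # Nothing else is required: the default phases run
        #   python setup.py --no-user-cfg build
        #   python setup.py --no-user-cfg install --prefix=<prefix>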
diff --git a/lib/spack/spack/build_systems/r.py b/lib/spack/spack/build_systems/r.py
new file mode 100644
index 0000000000..f642f2dfd8
--- /dev/null
+++ b/lib/spack/spack/build_systems/r.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import inspect
+
+from spack.directives import extends
+from spack.package import PackageBase
+
+
+class RPackage(PackageBase):
+ """Specialized class for packages that are built using R
+
+ This class provides a single phase that can be overridden:
+
+ * install
+
+ It has sensible defaults and for many packages the only thing
+ necessary will be to add dependencies
+ """
+ phases = ['install']
+
+ # To be used in UI queries that need to know which
+ # build-system class we are using
+ build_system_class = 'RPackage'
+
+ extends('r')
+
+ def install(self, spec, prefix):
+ """Install the R package"""
+ inspect.getmodule(self).R(
+ 'CMD', 'INSTALL',
+ '--library={0}'.format(self.module.r_lib_dir),
+ self.stage.source_path)
+
+ # Check that self.prefix is there after installation
+ PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
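And a hypothetical R package built on RPackage, again with invented metadata:

    from spack import *


    class RGrault(RPackage):
        """Hypothetical R package (illustrative only)."""

        homepage = "https://example.com/grault"
        url      = "https://example.com/grault_0.9.tar.gz"

        version('0.9', '76543210fedcba9876543210fedcba98')

        depends_on('r-ggplot2')

        # No phases need overriding: the single 'install' phase runs
        # `R CMD INSTALL --library=<r_lib_dir> <source dir>` by default.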
diff --git a/lib/spack/spack/cmd/__init__.py b/lib/spack/spack/cmd/__init__.py
index 672999159c..764b6fffcf 100644
--- a/lib/spack/spack/cmd/__init__.py
+++ b/lib/spack/spack/cmd/__init__.py
@@ -27,18 +27,22 @@ import re
import sys
import llnl.util.tty as tty
-from llnl.util.lang import attr_setdefault
+from llnl.util.lang import *
+from llnl.util.tty.colify import *
+from llnl.util.tty.color import *
import spack
-import spack.spec
import spack.config
+import spack.spec
+import spack.store
#
# Settings for commands that modify configuration
#
-# Commands that modify confguration By default modify the *highest* priority scope.
+# Commands that modify configuration by default modify the *highest*
+# priority scope.
default_modify_scope = spack.config.highest_precedence_scope().name
-# Commands that list confguration list *all* scopes by default.
+# Commands that list configuration list *all* scopes by default.
default_list_scope = None
# cmd has a submodule called "list" so preserve the python list module
@@ -48,7 +52,7 @@ python_list = list
ignore_files = r'^\.|^__init__.py$|^#'
SETUP_PARSER = "setup_parser"
-DESCRIPTION = "description"
+DESCRIPTION = "description"
command_path = os.path.join(spack.lib_path, "spack", "cmd")
@@ -60,6 +64,15 @@ for file in os.listdir(command_path):
commands.sort()
+def remove_options(parser, *options):
+ """Remove some options from a parser."""
+ for option in options:
+ for action in parser._actions:
+ if vars(action)['option_strings'][0] == option:
+ parser._handle_conflict_resolve(None, [(option, action)])
+ break
+
+
def get_cmd_function_name(name):
return name.replace("-", "_")
@@ -67,17 +80,17 @@ def get_cmd_function_name(name):
def get_module(name):
"""Imports the module for a particular command name and returns it."""
module_name = "%s.%s" % (__name__, name)
- module = __import__(
- module_name, fromlist=[name, SETUP_PARSER, DESCRIPTION],
- level=0)
+ module = __import__(module_name,
+ fromlist=[name, SETUP_PARSER, DESCRIPTION],
+ level=0)
- attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
+ attr_setdefault(module, SETUP_PARSER, lambda *args: None) # null-op
attr_setdefault(module, DESCRIPTION, "")
fn_name = get_cmd_function_name(name)
if not hasattr(module, fn_name):
- tty.die("Command module %s (%s) must define function '%s'."
- % (module.__name__, module.__file__, fn_name))
+ tty.die("Command module %s (%s) must define function '%s'." %
+ (module.__name__, module.__file__, fn_name))
return module
@@ -94,24 +107,21 @@ def parse_specs(args, **kwargs):
concretize = kwargs.get('concretize', False)
normalize = kwargs.get('normalize', False)
- if isinstance(args, (python_list, tuple)):
- args = " ".join(args)
-
try:
specs = spack.spec.parse(args)
for spec in specs:
if concretize:
- spec.concretize() # implies normalize
+ spec.concretize() # implies normalize
elif normalize:
spec.normalize()
return specs
- except spack.parse.ParseError, e:
+ except spack.parse.ParseError as e:
tty.error(e.message, e.string, e.pos * " " + "^")
sys.exit(1)
- except spack.spec.SpecError, e:
+ except spack.spec.SpecError as e:
tty.error(e.message)
sys.exit(1)
@@ -127,21 +137,116 @@ def elide_list(line_list, max_num=10):
[1, 2, 3, '...', 6]
"""
if len(line_list) > max_num:
- return line_list[:max_num-1] + ['...'] + line_list[-1:]
+ return line_list[:max_num - 1] + ['...'] + line_list[-1:]
else:
return line_list
def disambiguate_spec(spec):
- matching_specs = spack.installed_db.query(spec)
+ matching_specs = spack.store.db.query(spec)
if not matching_specs:
tty.die("Spec '%s' matches no installed packages." % spec)
elif len(matching_specs) > 1:
- args = ["%s matches multiple packages." % spec,
- "Matching packages:"]
- args += [" " + str(s) for s in matching_specs]
+ args = ["%s matches multiple packages." % spec,
+ "Matching packages:"]
+ color = sys.stdout.isatty()
+ args += [colorize(" @K{%s} " % s.dag_hash(7), color=color) +
+ s.format('$_$@$%@$=', color=color) for s in matching_specs]
args += ["Use a more specific spec."]
tty.die(*args)
return matching_specs[0]
+
+
+def ask_for_confirmation(message):
+ while True:
+ tty.msg(message + '[y/n]')
+ choice = raw_input().lower()
+ if choice == 'y':
+ break
+ elif choice == 'n':
+ raise SystemExit('Operation aborted')
+ tty.warn('Please reply either "y" or "n"')
+
+
+def gray_hash(spec, length):
+ return colorize('@K{%s}' % spec.dag_hash(length))
+
+
+def display_specs(specs, **kwargs):
+ mode = kwargs.get('mode', 'short')
+ hashes = kwargs.get('long', False)
+ namespace = kwargs.get('namespace', False)
+ flags = kwargs.get('show_flags', False)
+ variants = kwargs.get('variants', False)
+
+ hlen = 7
+ if kwargs.get('very_long', False):
+ hashes = True
+ hlen = None
+
+ nfmt = '.' if namespace else '_'
+ ffmt = '$%+' if flags else ''
+ vfmt = '$+' if variants else ''
+ format_string = '$%s$@%s%s' % (nfmt, ffmt, vfmt)
+
+ # Make a dict with specs keyed by architecture and compiler.
+ index = index_by(specs, ('architecture', 'compiler'))
+
+ # Traverse the index and print out each package
+ for i, (architecture, compiler) in enumerate(sorted(index)):
+ if i > 0:
+ print
+
+ header = "%s{%s} / %s{%s}" % (spack.spec.architecture_color,
+ architecture, spack.spec.compiler_color,
+ compiler)
+ tty.hline(colorize(header), char='-')
+
+ specs = index[(architecture, compiler)]
+ specs.sort()
+
+ abbreviated = [s.format(format_string, color=True) for s in specs]
+ if mode == 'paths':
+ # Print one spec per line along with prefix path
+ width = max(len(s) for s in abbreviated)
+ width += 2
+ format = " %%-%ds%%s" % width
+
+ for abbrv, spec in zip(abbreviated, specs):
+ prefix = gray_hash(spec, hlen) if hashes else ''
+ print prefix + (format % (abbrv, spec.prefix))
+
+ elif mode == 'deps':
+ for spec in specs:
+ print(spec.tree(
+ format=format_string,
+ color=True,
+ indent=4,
+ prefix=(lambda s: gray_hash(s, hlen)) if hashes else None))
+
+ elif mode == 'short':
+ # Print columns of output if not printing flags
+ if not flags:
+
+ def fmt(s):
+ string = ""
+ if hashes:
+ string += gray_hash(s, hlen) + ' '
+ string += s.format('$-%s$@%s' % (nfmt, vfmt), color=True)
+
+ return string
+
+ colify(fmt(s) for s in specs)
+ # Print one entry per line if including flags
+ else:
+ for spec in specs:
+ # Print the hash if necessary
+ hsh = gray_hash(spec, hlen) + ' ' if hashes else ''
+ print(hsh + spec.format(format_string, color=True) + '\n')
+
+ else:
+ raise ValueError(
+ "Invalid mode for display_specs: %s. Must be one of (paths,"
+ "deps, short)." % mode)
diff --git a/lib/spack/spack/cmd/activate.py b/lib/spack/spack/cmd/activate.py
index 9867fa8835..797cdcb136 100644
--- a/lib/spack/spack/cmd/activate.py
+++ b/lib/spack/spack/cmd/activate.py
@@ -29,12 +29,14 @@ import spack.cmd
description = "Activate a package extension."
+
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
help="Activate without first activating dependencies.")
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="spec of package extension to activate.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="spec of package extension to activate.")
def activate(parser, args):
diff --git a/lib/spack/spack/cmd/arch.py b/lib/spack/spack/cmd/arch.py
index dc96dd0faa..4e29230c28 100644
--- a/lib/spack/spack/cmd/arch.py
+++ b/lib/spack/spack/cmd/arch.py
@@ -22,14 +22,20 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import spack
import spack.architecture as architecture
-description = "Print the architecture for this machine"
+description = "Print architecture information about this machine."
+
+
+def setup_parser(subparser):
+ parts = subparser.add_mutually_exclusive_group()
+ parts.add_argument(
+ '-p', '--platform', action='store_true', default=False,
+ help="Print only the platform.")
+
def arch(parser, args):
- configured_sys_type = architecture.get_sys_type_from_spack_globals()
- if not configured_sys_type:
- configured_sys_type = "autodetect"
- print "Configured sys_type: %s" % configured_sys_type
- print "Autodetected default sys_type: %s" % architecture.sys_type()
+ if args.platform:
+ print architecture.platform()
+ else:
+ print architecture.sys_type()
diff --git a/lib/spack/spack/cmd/bootstrap.py b/lib/spack/spack/cmd/bootstrap.py
index bec11439b5..a79ef4aa68 100644
--- a/lib/spack/spack/cmd/bootstrap.py
+++ b/lib/spack/spack/cmd/bootstrap.py
@@ -23,34 +23,56 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-from subprocess import check_call
import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
import spack
-from spack.util.executable import which
+from spack.util.executable import ProcessError, which
+
+_SPACK_UPSTREAM = 'https://github.com/llnl/spack'
description = "Create a new installation of spack in another prefix"
+
def setup_parser(subparser):
- subparser.add_argument('prefix', help="names of prefix where we should install spack")
+ subparser.add_argument(
+ '-r', '--remote', action='store', dest='remote',
+ help="name of the remote to bootstrap from", default='origin')
+ subparser.add_argument(
+ 'prefix',
+ help="names of prefix where we should install spack")
-def get_origin_url():
+def get_origin_info(remote):
git_dir = join_path(spack.prefix, '.git')
git = which('git', required=True)
- origin_url = git(
- '--git-dir=%s' % git_dir, 'config', '--get', 'remote.origin.url',
- output=str)
- return origin_url.strip()
+ try:
+ branch = git('symbolic-ref', '--short', 'HEAD', output=str)
+ except ProcessError:
+ branch = 'develop'
+ tty.warn('No branch found; using default branch: %s' % branch)
+ if remote == 'origin' and \
+ branch not in ('master', 'develop'):
+ branch = 'develop'
+ tty.warn('Unknown branch found; using default branch: %s' % branch)
+ try:
+ origin_url = git(
+ '--git-dir=%s' % git_dir,
+ 'config', '--get', 'remote.%s.url' % remote,
+ output=str)
+ except ProcessError:
+ origin_url = _SPACK_UPSTREAM
+ tty.warn('No git repository found; '
+ 'using default upstream URL: %s' % origin_url)
+ return (origin_url.strip(), branch.strip())
def bootstrap(parser, args):
- origin_url = get_origin_url()
+ origin_url, branch = get_origin_info(args.remote)
prefix = args.prefix
- tty.msg("Fetching spack from origin: %s" % origin_url)
+ tty.msg("Fetching spack from '%s': %s" % (args.remote, origin_url))
if os.path.isfile(prefix):
tty.die("There is already a file at %s" % prefix)
@@ -62,7 +84,8 @@ def bootstrap(parser, args):
files_in_the_way = os.listdir(prefix)
if files_in_the_way:
- tty.die("There are already files there! Delete these files before boostrapping spack.",
+ tty.die("There are already files there! "
+ "Delete these files before boostrapping spack.",
*files_in_the_way)
tty.msg("Installing:",
@@ -73,8 +96,10 @@ def bootstrap(parser, args):
git = which('git', required=True)
git('init', '--shared', '-q')
git('remote', 'add', 'origin', origin_url)
- git('fetch', 'origin', 'master:refs/remotes/origin/master', '-n', '-q')
- git('reset', '--hard', 'origin/master', '-q')
+ git('fetch', 'origin', '%s:refs/remotes/origin/%s' % (branch, branch),
+ '-n', '-q')
+ git('reset', '--hard', 'origin/%s' % branch, '-q')
+ git('checkout', '-B', branch, 'origin/%s' % branch, '-q')
tty.msg("Successfully created a new spack in %s" % prefix,
"Run %s/bin/spack to use this installation." % prefix)
diff --git a/lib/spack/spack/cmd/build.py b/lib/spack/spack/cmd/build.py
new file mode 100644
index 0000000000..6c0029794f
--- /dev/null
+++ b/lib/spack/spack/cmd/build.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import spack.cmd.configure as cfg
+
+from spack import *
+
+description = 'Stops at build stage when installing a package, if possible'
+
+build_system_to_phase = {
+ CMakePackage: 'build',
+ AutotoolsPackage: 'build',
+ PythonPackage: 'build'
+}
+
+
+def setup_parser(subparser):
+ cfg.setup_parser(subparser)
+
+
+def build(parser, args):
+ cfg._stop_at_phase_during_install(args, build, build_system_to_phase)
diff --git a/lib/spack/spack/cmd/cd.py b/lib/spack/spack/cmd/cd.py
index aa45f67ae1..cf7232258c 100644
--- a/lib/spack/spack/cmd/cd.py
+++ b/lib/spack/spack/cmd/cd.py
@@ -25,7 +25,8 @@
import spack.cmd.location
import spack.modules
-description="cd to spack directories in the shell."
+description = "cd to spack directories in the shell."
+
def setup_parser(subparser):
"""This is for decoration -- spack cd is used through spack's
diff --git a/lib/spack/spack/cmd/checksum.py b/lib/spack/spack/cmd/checksum.py
index 95bd4771ed..8e4de0efc3 100644
--- a/lib/spack/spack/cmd/checksum.py
+++ b/lib/spack/spack/cmd/checksum.py
@@ -22,6 +22,8 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
import argparse
import hashlib
@@ -30,6 +32,7 @@ import spack
import spack.cmd
import spack.util.crypto
from spack.stage import Stage, FailedDownloadError
+from spack.util.naming import *
from spack.version import *
description = "Checksum available versions of a package."
@@ -37,79 +40,125 @@ description = "Checksum available versions of a package."
def setup_parser(subparser):
subparser.add_argument(
- 'package', metavar='PACKAGE', help='Package to list versions for')
+ 'package',
+ help='Package to checksum versions for')
subparser.add_argument(
- '--keep-stage', action='store_true', dest='keep_stage',
+ '--keep-stage', action='store_true',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
- 'versions', nargs=argparse.REMAINDER, help='Versions to generate checksums for')
+ 'versions', nargs=argparse.REMAINDER,
+ help='Versions to generate checksums for')
+
+
+def get_checksums(url_dict, name, **kwargs):
+ """Fetches and checksums archives from URLs.
+
+ This function is called by both ``spack checksum`` and ``spack create``.
+ The ``first_stage_function`` kwarg allows ``spack create`` to determine
+ things like the build system of the archive.
+ :param dict url_dict: A dictionary of the form: version -> URL
+ :param str name: The name of the package
+ :param callable first_stage_function: Function to run on first staging area
+ :param bool keep_stage: Don't clean up staging area when command completes
-def get_checksums(versions, urls, **kwargs):
- # Allow commands like create() to do some analysis on the first
- # archive after it is downloaded.
+ :returns: A multi-line string containing versions and corresponding hashes
+ :rtype: str
+ """
first_stage_function = kwargs.get('first_stage_function', None)
keep_stage = kwargs.get('keep_stage', False)
+ sorted_versions = sorted(url_dict.keys(), reverse=True)
+
+ # Find length of longest string in the list for padding
+ max_len = max(len(str(v)) for v in sorted_versions)
+ num_ver = len(sorted_versions)
+
+ tty.msg("Found {0} version{1} of {2}:".format(
+ num_ver, '' if num_ver == 1 else 's', name),
+ "",
+ *spack.cmd.elide_list(
+ ["{0:{1}} {2}".format(v, max_len, url_dict[v])
+ for v in sorted_versions]))
+ print()
+
+ archives_to_fetch = tty.get_number(
+ "How many would you like to checksum?", default=1, abort='q')
+
+ if not archives_to_fetch:
+ tty.die("Aborted.")
+
+ versions = sorted_versions[:archives_to_fetch]
+ urls = [url_dict[v] for v in versions]
+
tty.msg("Downloading...")
- hashes = []
+ version_hashes = []
i = 0
for url, version in zip(urls, versions):
try:
with Stage(url, keep=keep_stage) as stage:
+ # Fetch the archive
stage.fetch()
if i == 0 and first_stage_function:
- first_stage_function(stage)
+ # Only run first_stage_function the first time,
+ # no need to run it every time
+ first_stage_function(stage, url)
- hashes.append((version,
- spack.util.crypto.checksum(hashlib.md5, stage.archive_file)))
+ # Checksum the archive and add it to the list
+ version_hashes.append((version, spack.util.crypto.checksum(
+ hashlib.md5, stage.archive_file)))
i += 1
- except FailedDownloadError as e:
- tty.msg("Failed to fetch %s" % url)
+ except FailedDownloadError:
+ tty.msg("Failed to fetch {0}".format(url))
except Exception as e:
- tty.msg('Something failed on %s, skipping.\n (%s)' % (url, e))
+ tty.msg("Something failed on {0}, skipping.".format(url),
+ " ({0})".format(e))
- return hashes
+ if not version_hashes:
+ tty.die("Could not fetch any versions for {0}".format(name))
+ # Find length of longest string in the list for padding
+ max_len = max(len(str(v)) for v, h in version_hashes)
-def checksum(parser, args):
- # get the package we're going to generate checksums for
- pkg = spack.repo.get(args.package)
+ # Generate the version directives to put in a package.py
+ version_lines = "\n".join([
+ " version('{0}', {1}'{2}')".format(
+ v, ' ' * (max_len - len(str(v))), h) for v, h in version_hashes
+ ])
- # If the user asked for specific versions, use those.
- if args.versions:
- versions = {}
- for v in args.versions:
- v = ver(v)
- if not isinstance(v, Version):
- tty.die("Cannot generate checksums for version lists or " +
- "version ranges. Use unambiguous versions.")
- versions[v] = pkg.url_for_version(v)
- else:
- versions = pkg.fetch_remote_versions()
- if not versions:
- tty.die("Could not fetch any versions for %s" % pkg.name)
+ num_hash = len(version_hashes)
+ tty.msg("Checksummed {0} version{1} of {2}".format(
+ num_hash, '' if num_hash == 1 else 's', name))
- sorted_versions = sorted(versions, reverse=True)
+ return version_lines
- tty.msg("Found %s versions of %s" % (len(versions), pkg.name),
- *spack.cmd.elide_list(
- ["%-10s%s" % (v, versions[v]) for v in sorted_versions]))
- print
- archives_to_fetch = tty.get_number(
- "How many would you like to checksum?", default=5, abort='q')
- if not archives_to_fetch:
- tty.msg("Aborted.")
- return
+def checksum(parser, args):
+ # Make sure the user provided a package and not a URL
+ if not valid_fully_qualified_module_name(args.package):
+ tty.die("`spack checksum` accepts package names, not URLs. "
+ "Use `spack md5 <url>` instead.")
- version_hashes = get_checksums(
- sorted_versions[:archives_to_fetch],
- [versions[v] for v in sorted_versions[:archives_to_fetch]],
- keep_stage=args.keep_stage)
+ # Get the package we're going to generate checksums for
+ pkg = spack.repo.get(args.package)
- if not version_hashes:
- tty.die("Could not fetch any versions for %s" % pkg.name)
+ if args.versions:
+ # If the user asked for specific versions, use those
+ url_dict = {}
+ for version in args.versions:
+ version = ver(version)
+ if not isinstance(version, Version):
+ tty.die("Cannot generate checksums for version lists or "
+ "version ranges. Use unambiguous versions.")
+ url_dict[version] = pkg.url_for_version(version)
+ else:
+ # Otherwise, see what versions we can find online
+ url_dict = pkg.fetch_remote_versions()
+ if not url_dict:
+ tty.die("Could not find any versions for {0}".format(pkg.name))
+
+ version_lines = get_checksums(
+ url_dict, pkg.name, keep_stage=args.keep_stage)
- version_lines = [" version('%s', '%s')" % (v, h) for v, h in version_hashes]
- tty.msg("Checksummed new versions of %s:" % pkg.name, *version_lines)
+ print()
+ print(version_lines)
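The version-directive formatting in get_checksums pads shorter version strings so the hashes line up; a standalone sketch with made-up data:

version_hashes = [('10.0.1', 'd41d8cd98f00b204e9800998ecf8427e'),
                  ('2.1', '0cc175b9c0f1b6a831c399e269772661')]

# Pad each version so the hash column is aligned, as in the command above.
max_len = max(len(str(v)) for v, h in version_hashes)
version_lines = "\n".join(
    "    version('{0}', {1}'{2}')".format(v, ' ' * (max_len - len(str(v))), h)
    for v, h in version_hashes)

# Prints two aligned version() directives ready to paste into a package.py.
print(version_lines)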
diff --git a/lib/spack/spack/cmd/clean.py b/lib/spack/spack/cmd/clean.py
index 514c5874ef..dc62fbcaf6 100644
--- a/lib/spack/spack/cmd/clean.py
+++ b/lib/spack/spack/cmd/clean.py
@@ -31,6 +31,7 @@ import spack.cmd
description = "Remove build stage and source tarball for packages."
+
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to clean")
diff --git a/lib/spack/spack/cmd/common/__init__.py b/lib/spack/spack/cmd/common/__init__.py
new file mode 100644
index 0000000000..ed1ec23bca
--- /dev/null
+++ b/lib/spack/spack/cmd/common/__init__.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
diff --git a/lib/spack/spack/cmd/common/arguments.py b/lib/spack/spack/cmd/common/arguments.py
new file mode 100644
index 0000000000..f091b9cf75
--- /dev/null
+++ b/lib/spack/spack/cmd/common/arguments.py
@@ -0,0 +1,107 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import argparse
+
+import spack.cmd
+import spack.store
+import spack.modules
+from spack.util.pattern import Args
+__all__ = ['add_common_arguments']
+
+_arguments = {}
+
+
+def add_common_arguments(parser, list_of_arguments):
+ for argument in list_of_arguments:
+ if argument not in _arguments:
+ message = 'Trying to add a non-existing argument "{0}" to a command'
+ raise KeyError(message.format(argument))
+ x = _arguments[argument]
+ parser.add_argument(*x.flags, **x.kwargs)
+
+
+class ConstraintAction(argparse.Action):
+ """Constructs a list of specs based on a constraint given on the command line
+
+ An instance of this class is meant to be used as an argument action in a
+ parser. It reads a constraint and attaches to the namespace a function
+ that accepts optional keyword arguments.
+
+ To obtain the matching specs, a command calls that function.
+ """
+
+ def __call__(self, parser, namespace, values, option_string=None):
+ # Query specs from command line
+ self.values = values
+ namespace.constraint = values
+ namespace.specs = self._specs
+
+ def _specs(self, **kwargs):
+ qspecs = spack.cmd.parse_specs(self.values)
+
+ # return everything for an empty query.
+ if not qspecs:
+ return spack.store.db.query()
+
+ # Return only matching stuff otherwise.
+ specs = set()
+ for spec in qspecs:
+ for s in spack.store.db.query(spec, **kwargs):
+ specs.add(s)
+ return sorted(specs)
+
+
+_arguments['constraint'] = Args(
+ 'constraint', nargs=argparse.REMAINDER, action=ConstraintAction,
+ help='Constraint to select a subset of installed packages')
+
+_arguments['module_type'] = Args(
+ '-m', '--module-type', help='Type of module files',
+ default='tcl', choices=spack.modules.module_types)
+
+_arguments['yes_to_all'] = Args(
+ '-y', '--yes-to-all', action='store_true', dest='yes_to_all',
+ help='Assume "yes" is the answer to every confirmation request.')
+
+_arguments['recurse_dependencies'] = Args(
+ '-r', '--dependencies', action='store_true', dest='recurse_dependencies',
+ help='Recursively traverse spec dependencies')
+
+_arguments['clean'] = Args(
+ '--clean', action='store_false', dest='dirty',
+ help='Clean environment before installing package.')
+
+_arguments['dirty'] = Args(
+ '--dirty', action='store_true', dest='dirty',
+ help='Do NOT clean environment before installing.')
+
+_arguments['long'] = Args(
+ '-l', '--long', action='store_true',
+ help='Show dependency hashes as well as versions.')
+
+_arguments['very_long'] = Args(
+ '-L', '--very-long', action='store_true',
+ help='Show full dependency hashes as well as versions.')
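A hypothetical command module could reuse these shared arguments like this (the command name `status` is invented; the sketch assumes it lives under lib/spack/spack/cmd/ so the imports resolve):

import spack.cmd.common.arguments as arguments

def setup_parser(subparser):
    # Pull in the shared 'constraint' and 'long' arguments defined above.
    arguments.add_common_arguments(subparser, ['constraint', 'long'])

def status(parser, args):
    # ConstraintAction stored a callable on the namespace; calling it
    # queries the installed-package database for specs matching the
    # constraint given on the command line.
    for spec in args.specs():
        print(spec)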
diff --git a/lib/spack/spack/cmd/compiler.py b/lib/spack/spack/cmd/compiler.py
index dc7731a290..609210f77e 100644
--- a/lib/spack/spack/cmd/compiler.py
+++ b/lib/spack/spack/cmd/compiler.py
@@ -32,11 +32,12 @@ import spack.spec
from llnl.util.lang import index_by
from llnl.util.tty.colify import colify
from llnl.util.tty.color import colorize
-from spack.spec import CompilerSpec
+from spack.spec import CompilerSpec, ArchSpec
from spack.util.environment import get_path
description = "Manage compilers"
+
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='compiler_command')
@@ -44,48 +45,72 @@ def setup_parser(subparser):
scopes = spack.config.config_scopes
# Find
- find_parser = sp.add_parser('find', aliases=['add'], help='Search the system for compilers to add to the Spack configuration.')
+ find_parser = sp.add_parser(
+ 'find', aliases=['add'],
+ help='Search the system for compilers to add to Spack configuration.')
find_parser.add_argument('add_paths', nargs=argparse.REMAINDER)
- find_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
- help="Configuration scope to modify.")
+ find_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
# Remove
- remove_parser = sp.add_parser('remove', aliases=['rm'], help='Remove compiler by spec.')
+ remove_parser = sp.add_parser(
+ 'remove', aliases=['rm'], help='Remove compiler by spec.')
remove_parser.add_argument(
- '-a', '--all', action='store_true', help='Remove ALL compilers that match spec.')
+ '-a', '--all', action='store_true',
+ help='Remove ALL compilers that match spec.')
remove_parser.add_argument('compiler_spec')
- remove_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_modify_scope,
- help="Configuration scope to modify.")
+ remove_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_modify_scope,
+ help="Configuration scope to modify.")
# List
list_parser = sp.add_parser('list', help='list available compilers')
- list_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
- help="Configuration scope to read from.")
+ list_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
# Info
info_parser = sp.add_parser('info', help='Show compiler paths.')
info_parser.add_argument('compiler_spec')
- info_parser.add_argument('--scope', choices=scopes, default=spack.cmd.default_list_scope,
- help="Configuration scope to read from.")
+ info_parser.add_argument(
+ '--scope', choices=scopes, default=spack.cmd.default_list_scope,
+ help="Configuration scope to read from.")
def compiler_find(args):
- """Search either $PATH or a list of paths for compilers and add them
- to Spack's configuration."""
+ """Search either $PATH or a list of paths OR MODULES for compilers and
+ add them to Spack's configuration.
+
+ """
paths = args.add_paths
if not paths:
paths = get_path('PATH')
- compilers = [c for c in spack.compilers.find_compilers(*args.add_paths)
- if c.spec not in spack.compilers.all_compilers(scope=args.scope)]
-
- if compilers:
- spack.compilers.add_compilers_to_config(compilers, scope=args.scope)
- n = len(compilers)
+ # Don't initialize the compilers config via compilers.get_compiler_config.
+ # Just let compiler_find drive the entire process and return an empty
+ # config from all_compilers. (Any other caller defaults to init_config=True.)
+ compilers = [c for c in spack.compilers.find_compilers(*paths)]
+ new_compilers = []
+ for c in compilers:
+ arch_spec = ArchSpec(None, c.operating_system, c.target)
+ same_specs = spack.compilers.compilers_for_spec(c.spec,
+ arch_spec,
+ args.scope)
+
+ if not same_specs:
+ new_compilers.append(c)
+
+ if new_compilers:
+ spack.compilers.add_compilers_to_config(new_compilers,
+ scope=args.scope,
+ init_config=False)
+ n = len(new_compilers)
s = 's' if n > 1 else ''
filename = spack.config.get_config_filename(args.scope, 'compilers')
tty.msg("Added %d new compiler%s to %s" % (n, s, filename))
- colify(reversed(sorted(c.spec for c in compilers)), indent=4)
+ colify(reversed(sorted(c.spec for c in new_compilers)), indent=4)
else:
tty.msg("Found no new compilers")
@@ -93,17 +118,17 @@ def compiler_find(args):
def compiler_remove(args):
cspec = CompilerSpec(args.compiler_spec)
compilers = spack.compilers.compilers_for_spec(cspec, scope=args.scope)
-
if not compilers:
tty.die("No compilers match spec %s" % cspec)
elif not args.all and len(compilers) > 1:
tty.error("Multiple compilers match spec %s. Choose one:" % cspec)
colify(reversed(sorted([c.spec for c in compilers])), indent=4)
- tty.msg("Or, you can use `spack compiler remove -a` to remove all of them.")
+ tty.msg("Or, use `spack compiler remove -a` to remove all of them.")
sys.exit(1)
for compiler in compilers:
- spack.compilers.remove_compiler_from_config(compiler.spec, scope=args.scope)
+ spack.compilers.remove_compiler_from_config(
+ compiler.spec, scope=args.scope)
tty.msg("Removed compiler %s" % compiler.spec)
@@ -117,17 +142,33 @@ def compiler_info(args):
else:
for c in compilers:
print str(c.spec) + ":"
- print "\tcc = %s" % c.cc
- print "\tcxx = %s" % c.cxx
- print "\tf77 = %s" % c.f77
- print "\tfc = %s" % c.fc
+ print "\tpaths:"
+ for cpath in ['cc', 'cxx', 'f77', 'fc']:
+ print "\t\t%s = %s" % (cpath, getattr(c, cpath, None))
+ if c.flags:
+ print "\tflags:"
+ for flag, flag_value in c.flags.iteritems():
+ print "\t\t%s = %s" % (flag, flag_value)
+ if len(c.environment) != 0:
+ if len(c.environment['set']) != 0:
+ print "\tenvironment:"
+ print "\t set:"
+ for key, value in c.environment['set'].iteritems():
+ print "\t %s = %s" % (key, value)
+ if c.extra_rpaths:
+ print "\tExtra rpaths:"
+ for extra_rpath in c.extra_rpaths:
+ print "\t\t%s" % extra_rpath
+ print "\tmodules = %s" % c.modules
+ print "\toperating system = %s" % c.operating_system
def compiler_list(args):
tty.msg("Available compilers")
index = index_by(spack.compilers.all_compilers(scope=args.scope), 'name')
for i, (name, compilers) in enumerate(index.items()):
- if i >= 1: print
+ if i >= 1:
+ print
cname = "%s{%s}" % (spack.spec.compiler_color, name)
tty.hline(colorize(cname), char='-')
@@ -135,10 +176,10 @@ def compiler_list(args):
def compiler(parser, args):
- action = { 'add' : compiler_find,
- 'find' : compiler_find,
- 'remove' : compiler_remove,
- 'rm' : compiler_remove,
- 'info' : compiler_info,
- 'list' : compiler_list }
+ action = {'add': compiler_find,
+ 'find': compiler_find,
+ 'remove': compiler_remove,
+ 'rm': compiler_remove,
+ 'info': compiler_info,
+ 'list': compiler_list}
action[args.compiler_command](args)
diff --git a/lib/spack/spack/cmd/compilers.py b/lib/spack/spack/cmd/compilers.py
index 9fbc2bb952..b87f977e5a 100644
--- a/lib/spack/spack/cmd/compilers.py
+++ b/lib/spack/spack/cmd/compilers.py
@@ -22,18 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import llnl.util.tty as tty
-from llnl.util.tty.colify import colify
-from llnl.util.lang import index_by
-
import spack
from spack.cmd.compiler import compiler_list
description = "List available compilers. Same as 'spack compiler list'."
+
def setup_parser(subparser):
subparser.add_argument('--scope', choices=spack.config.config_scopes,
help="Configuration scope to read/modify.")
+
def compilers(parser, args):
compiler_list(args)
diff --git a/lib/spack/spack/cmd/config.py b/lib/spack/spack/cmd/config.py
index d6f56c270d..3288c4cb8b 100644
--- a/lib/spack/spack/cmd/config.py
+++ b/lib/spack/spack/cmd/config.py
@@ -22,32 +22,31 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-import argparse
-
-import llnl.util.tty as tty
-
import spack.config
description = "Get and set configuration options."
+
def setup_parser(subparser):
# User can only choose one
- scope_group = subparser.add_mutually_exclusive_group()
- scope_group.add_argument(
- '--user', action='store_const', const='user', dest='scope',
- help="Use config file in user home directory (default).")
- scope_group.add_argument(
- '--site', action='store_const', const='site', dest='scope',
- help="Use config file in spack prefix.")
+ subparser.add_argument('--scope', choices=spack.config.config_scopes,
+ help="Configuration scope to read/modify.")
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='config_command')
get_parser = sp.add_parser('get', help='Print configuration values.')
- get_parser.add_argument('section', help="Configuration section to print.")
+ get_parser.add_argument('section',
+ help="Configuration section to print. "
+ "Options: %(choices)s.",
+ metavar='SECTION',
+ choices=spack.config.section_schemas)
edit_parser = sp.add_parser('edit', help='Edit configuration file.')
- edit_parser.add_argument('section', help="Configuration section to edit")
+ edit_parser.add_argument('section',
+ help="Configuration section to edit. "
+ "Options: %(choices)s.",
+ metavar='SECTION',
+ choices=spack.config.section_schemas)
def config_get(args):
@@ -64,6 +63,6 @@ def config_edit(args):
def config(parser, args):
- action = { 'get' : config_get,
- 'edit' : config_edit }
+ action = {'get': config_get,
+ 'edit': config_edit}
action[args.config_command](args)
diff --git a/lib/spack/spack/cmd/configure.py b/lib/spack/spack/cmd/configure.py
new file mode 100644
index 0000000000..3eebe2584b
--- /dev/null
+++ b/lib/spack/spack/cmd/configure.py
@@ -0,0 +1,90 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import argparse
+
+import llnl.util.tty as tty
+import spack.cmd
+import spack.cmd.install as inst
+
+from spack import *
+
+description = 'Stops at configuration stage when installing a package, if possible' # NOQA: ignore=E501
+
+
+build_system_to_phase = {
+ CMakePackage: 'cmake',
+ AutotoolsPackage: 'configure'
+}
+
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ 'package',
+ nargs=argparse.REMAINDER,
+ help="spec of the package to install"
+ )
+ subparser.add_argument(
+ '-v', '--verbose',
+ action='store_true',
+ help="Print additional output during builds"
+ )
+
+
+def _stop_at_phase_during_install(args, calling_fn, phase_mapping):
+ if not args.package:
+ tty.die("configure requires at least one package argument")
+
+ # TODO: to be refactored with code in install
+ specs = spack.cmd.parse_specs(args.package, concretize=True)
+ if len(specs) != 1:
+ tty.error('only one spec can be installed at a time.')
+ spec = specs.pop()
+ pkg = spec.package
+ try:
+ key = [cls for cls in phase_mapping if isinstance(pkg, cls)].pop()
+ phase = phase_mapping[key]
+ # Install package dependencies if needed
+ parser = argparse.ArgumentParser()
+ inst.setup_parser(parser)
+ tty.msg('Checking dependencies for {0}'.format(args.package))
+ cli_args = ['-v'] if args.verbose else []
+ install_args = parser.parse_args(cli_args + ['--only=dependencies'])
+ install_args.package = args.package
+ inst.install(parser, install_args)
+ # Install package and stop at the given phase
+ cli_args = ['-v'] if args.verbose else []
+ install_args = parser.parse_args(cli_args + ['--only=package'])
+ install_args.package = args.package
+ inst.install(parser, install_args, stop_at=phase)
+ except IndexError:
+ tty.error(
+ 'Package {0} has no {1} phase, or its {1} phase is not separated from install'.format( # NOQA: ignore=E501
+ spec.name, calling_fn.__name__)
+ )
+
+
+def configure(parser, args):
+ _stop_at_phase_during_install(args, configure, build_system_to_phase)
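The phase lookup in _stop_at_phase_during_install keys the mapping by package base class and picks the first class the package instance derives from; a standalone toy version (class names are stand-ins):

class AutotoolsPackage(object):
    pass

class Hdf5Like(AutotoolsPackage):
    pass

phase_mapping = {AutotoolsPackage: 'configure'}
pkg = Hdf5Like()

# Same pattern as above: raises IndexError (reported as "no such phase")
# when the package's build system is not in the mapping.
key = [cls for cls in phase_mapping if isinstance(pkg, cls)].pop()
print(phase_mapping[key])  # -> configure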
diff --git a/lib/spack/spack/cmd/create.py b/lib/spack/spack/cmd/create.py
index 41bfa741f6..2575229581 100644
--- a/lib/spack/spack/cmd/create.py
+++ b/lib/spack/spack/cmd/create.py
@@ -1,4 +1,3 @@
-_copyright = """\
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -23,44 +22,60 @@ _copyright = """\
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""
-import string
+from __future__ import print_function
+
import os
-import hashlib
import re
-from ordereddict_backport import OrderedDict
import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp
-
import spack
import spack.cmd
import spack.cmd.checksum
import spack.url
import spack.util.web
+from llnl.util.filesystem import mkdirp
+from spack.repository import Repo
from spack.spec import Spec
-from spack.util.naming import *
-from spack.repository import Repo, RepoError
-import spack.util.crypto as crypto
-
from spack.util.executable import which
-from spack.stage import Stage
-
+from spack.util.naming import *
-description = "Create a new package file from an archive URL"
+description = "Create a new package file"
-package_template = string.Template(
- _copyright + """
+package_template = '''\
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
#
# This is a template package file for Spack. We've put "FIXME"
# next to all the things you'll want to change. Once you've handled
# them, you can save this file and test your package like this:
#
-# spack install ${name}
+# spack install {name}
#
# You can edit this file again by typing:
#
-# spack edit ${name}
+# spack edit {name}
#
# See the Spack documentation for more information on packaging.
# If you submit this package back to Spack as a pull request,
@@ -68,140 +83,442 @@ package_template = string.Template(
#
from spack import *
-class ${class_name}(Package):
- ""\"FIXME: put a proper description of your package here.""\"
- # FIXME: add a proper url for your package's homepage here.
+
+class {class_name}({base_class_name}):
+ """FIXME: Put a proper description of your package here."""
+
+ # FIXME: Add a proper url for your package's homepage here.
homepage = "http://www.example.com"
- url = "${url}"
+ url = "{url}"
-${versions}
+{versions}
- # FIXME: Add dependencies if this package requires them.
- # depends_on("foo")
+{dependencies}
+
+{body}
+'''
- def install(self, spec, prefix):
- # FIXME: Modify the configure line to suit your build system here.
- ${configure}
- # FIXME: Add logic to build and install here
+class PackageTemplate(object):
+ """Provides the default values to be used for the package file template"""
+
+ base_class_name = 'Package'
+
+ dependencies = """\
+ # FIXME: Add dependencies if required.
+ # depends_on('foo')"""
+
+ body = """\
+ def install(self, spec, prefix):
+ # FIXME: Unknown build system
make()
- make("install")
-""")
+ make('install')"""
+
+ def __init__(self, name, url, versions):
+ self.name = name
+ self.class_name = mod_to_class(name)
+ self.url = url
+ self.versions = versions
+
+ def write(self, pkg_path):
+ """Writes the new package file."""
+
+ # Write out a template for the file
+ with open(pkg_path, "w") as pkg_file:
+ pkg_file.write(package_template.format(
+ name=self.name,
+ class_name=self.class_name,
+ base_class_name=self.base_class_name,
+ url=self.url,
+ versions=self.versions,
+ dependencies=self.dependencies,
+ body=self.body))
+
+
+class AutotoolsPackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for Autotools-based packages"""
+
+ base_class_name = 'AutotoolsPackage'
+
+ dependencies = """\
+ # FIXME: Add dependencies if required.
+ # depends_on('foo')"""
+
+ body = """\
+ def configure_args(self):
+ # FIXME: Add arguments other than --prefix
+ # FIXME: If not needed delete this function
+ args = []
+ return args"""
+
+
+class CMakePackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for CMake-based packages"""
+
+ base_class_name = 'CMakePackage'
+
+ body = """\
+ def cmake_args(self):
+ # FIXME: Add arguments other than
+ # FIXME: CMAKE_INSTALL_PREFIX and CMAKE_BUILD_TYPE
+ # FIXME: If not needed delete this function
+ args = []
+ return args"""
+
+
+class SconsPackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for SCons-based packages"""
+
+ dependencies = """\
+ # FIXME: Add additional dependencies if required.
+ depends_on('scons', type='build')"""
+
+ body = """\
+ def install(self, spec, prefix):
+ # FIXME: Add logic to build and install here.
+ scons('prefix={0}'.format(prefix))
+ scons('install')"""
+
+
+class BazelPackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for Bazel-based packages"""
+
+ dependencies = """\
+ # FIXME: Add additional dependencies if required.
+ depends_on('bazel', type='build')"""
+
+ body = """\
+ def install(self, spec, prefix):
+ # FIXME: Add logic to build and install here.
+ bazel()"""
+
+
+class PythonPackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for python extensions"""
+ base_class_name = 'PythonPackage'
+
+ dependencies = """\
+ # FIXME: Add dependencies if required.
+ # depends_on('py-setuptools', type='build')
+ # depends_on('py-foo', type=('build', 'run'))"""
+
+ body = """\
+ def build_args(self):
+ # FIXME: Add arguments other than --prefix
+ # FIXME: If not needed delete the function
+ args = []
+ return args"""
+
+ def __init__(self, name, *args):
+ # If the user provided `--name py-numpy`, don't rename it py-py-numpy
+ if not name.startswith('py-'):
+ # Make it more obvious that we are renaming the package
+ tty.msg("Changing package name from {0} to py-{0}".format(name))
+ name = 'py-{0}'.format(name)
+
+ super(PythonPackageTemplate, self).__init__(name, *args)
+
+
+class RPackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for R extensions"""
+
+ dependencies = """\
+ # FIXME: Add dependencies if required.
+ # depends_on('r-foo', type=('build', 'run'))"""
+
+ body = """\
+ # FIXME: Override install() if necessary."""
+ def __init__(self, name, *args):
+ # If the user provided `--name r-rcpp`, don't rename it r-r-rcpp
+ if not name.startswith('r-'):
+ # Make it more obvious that we are renaming the package
+ tty.msg("Changing package name from {0} to r-{0}".format(name))
+ name = 'r-{0}'.format(name)
-def make_version_calls(ver_hash_tuples):
- """Adds a version() call to the package for each version found."""
- max_len = max(len(str(v)) for v, h in ver_hash_tuples)
- format = " version(%%-%ds, '%%s')" % (max_len + 2)
- return '\n'.join(format % ("'%s'" % v, h) for v, h in ver_hash_tuples)
+ super(RPackageTemplate, self).__init__(name, *args)
+
+
+class OctavePackageTemplate(PackageTemplate):
+ """Provides appropriate overrides for octave packages"""
+
+ dependencies = """\
+ extends('octave')
+
+ # FIXME: Add additional dependencies if required.
+ # depends_on('octave-foo', type=('build', 'run'))"""
+
+ body = """\
+ def install(self, spec, prefix):
+ # FIXME: Add logic to build and install here.
+ octave('--quiet', '--norc',
+ '--built-in-docstrings-file=/dev/null',
+ '--texi-macros-file=/dev/null',
+ '--eval', 'pkg prefix {0}; pkg install {1}'.format(
+ prefix, self.stage.archive_file))"""
+
+ def __init__(self, name, *args):
+ # If the user provided `--name octave-splines`, don't rename it
+ # octave-octave-splines
+ if not name.startswith('octave-'):
+ # Make it more obvious that we are renaming the package
+ tty.msg("Changing package name from {0} to octave-{0}".format(name)) # noqa
+ name = 'octave-{0}'.format(name)
+
+ super(OctavePackageTemplate, self).__init__(name, *args)
+
+
+templates = {
+ 'autotools': AutotoolsPackageTemplate,
+ 'cmake': CMakePackageTemplate,
+ 'scons': SconsPackageTemplate,
+ 'bazel': BazelPackageTemplate,
+ 'python': PythonPackageTemplate,
+ 'r': RPackageTemplate,
+ 'octave': OctavePackageTemplate,
+ 'generic': PackageTemplate
+}
def setup_parser(subparser):
- subparser.add_argument('url', nargs='?', help="url of package archive")
+ subparser.add_argument(
+ 'url', nargs='?',
+ help="url of package archive")
subparser.add_argument(
'--keep-stage', action='store_true',
help="Don't clean up staging area when command completes.")
subparser.add_argument(
- '-n', '--name', dest='alternate_name', default=None, metavar='NAME',
- help="Override the autodetected name for the created package.")
+ '-n', '--name',
+ help="name of the package to create")
subparser.add_argument(
- '-r', '--repo', default=None,
+ '-t', '--template', metavar='TEMPLATE', choices=templates.keys(),
+ help="build system template to use. options: %(choices)s")
+ subparser.add_argument(
+ '-r', '--repo',
help="Path to a repository where the package should be created.")
subparser.add_argument(
'-N', '--namespace',
help="Specify a namespace for the package. Must be the namespace of "
"a repository registered with Spack.")
subparser.add_argument(
- '-f', '--force', action='store_true', dest='force',
+ '-f', '--force', action='store_true',
help="Overwrite any existing package file with the same name.")
- setup_parser.subparser = subparser
-
-class ConfigureGuesser(object):
- def __call__(self, stage):
- """Try to guess the type of build system used by the project, and return
- an appropriate configure line.
- """
- autotools = "configure('--prefix=%s' % prefix)"
- cmake = "cmake('.', *std_cmake_args)"
- python = "python('setup.py', 'install', '--prefix=%s' % prefix)"
- r = "R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)"
-
- config_lines = ((r'/configure$', 'autotools', autotools),
- (r'/CMakeLists.txt$', 'cmake', cmake),
- (r'/setup.py$', 'python', python),
- (r'/NAMESPACE$', 'r', r))
-
- # Peek inside the tarball.
- tar = which('tar')
- output = tar(
- "--exclude=*/*/*", "-tf", stage.archive_file, output=str)
- lines = output.split("\n")
+class BuildSystemGuesser:
+ """An instance of BuildSystemGuesser provides a callable object to be used
+ during ``spack create``. By passing this object to ``spack checksum``, we
+ can take a peek at the fetched tarball and discern the build system it uses.
+ """
- # Set the configure line to the one that matched.
- for pattern, bs, cl in config_lines:
+ def __call__(self, stage, url):
+ """Try to guess the type of build system used by a project based on
+ the contents of its archive or the URL it was downloaded from."""
+
+ # Most octave extensions are hosted on Octave-Forge:
+ # http://octave.sourceforge.net/index.html
+ # They all have the same base URL.
+ if 'downloads.sourceforge.net/octave/' in url:
+ self.build_system = 'octave'
+ return
+
+ # A list of clues that give us an idea of the build system a package
+ # uses. If the regular expression matches a file contained in the
+ # archive, the corresponding build system is assumed.
+ clues = [
+ (r'/configure$', 'autotools'),
+ (r'/CMakeLists.txt$', 'cmake'),
+ (r'/SConstruct$', 'scons'),
+ (r'/setup.py$', 'python'),
+ (r'/NAMESPACE$', 'r'),
+ (r'/WORKSPACE$', 'bazel')
+ ]
+
+ # Peek inside the compressed file.
+ if stage.archive_file.endswith('.zip'):
+ try:
+ unzip = which('unzip')
+ output = unzip('-lq', stage.archive_file, output=str)
+ except:
+ output = ''
+ else:
+ try:
+ tar = which('tar')
+ output = tar('--exclude=*/*/*', '-tf',
+ stage.archive_file, output=str)
+ except:
+ output = ''
+ lines = output.split('\n')
+
+ # Determine the build system based on the files contained
+ # in the archive.
+ build_system = 'generic'
+ for pattern, bs in clues:
if any(re.search(pattern, l) for l in lines):
- config_line = cl
build_system = bs
- break
- else:
- # None matched -- just put both, with cmake commented out
- config_line = "# FIXME: Spack couldn't guess one, so here are some options:\n"
- config_line += " # " + autotools + "\n"
- config_line += " # " + cmake
- build_system = 'unknown'
- self.configure = config_line
self.build_system = build_system
-def guess_name_and_version(url, args):
- # Try to deduce name and version of the new package from the URL
- version = spack.url.parse_version(url)
- if not version:
- tty.die("Couldn't guess a version string from %s" % url)
+def get_name(args):
+ """Get the name of the package based on the supplied arguments.
- # Try to guess a name. If it doesn't work, allow the user to override.
- if args.alternate_name:
- name = args.alternate_name
- else:
+ If a name was provided, always use that. Otherwise, if a URL was
+ provided, extract the name from that. Otherwise, use a default.
+
+ :param argparse.Namespace args: The arguments given to ``spack create``
+
+ :returns: The name of the package
+ :rtype: str
+ """
+
+ # Default package name
+ name = 'example'
+
+ if args.name:
+ # Use a user-supplied name if one is present
+ name = args.name
+ tty.msg("Using specified package name: '{0}'".format(name))
+ elif args.url:
+ # Try to guess the package name based on the URL
try:
- name = spack.url.parse_name(url, version)
- except spack.url.UndetectableNameError, e:
- # Use a user-supplied name if one is present
- tty.die("Couldn't guess a name for this package. Try running:", "",
- "spack create --name <name> <url>")
+ name = spack.url.parse_name(args.url)
+ tty.msg("This looks like a URL for {0}".format(name))
+ except spack.url.UndetectableNameError:
+ tty.die("Couldn't guess a name for this package.",
+ " Please report this bug. In the meantime, try running:",
+ " `spack create --name <name> <url>`")
if not valid_fully_qualified_module_name(name):
- tty.die("Package name can only contain A-Z, a-z, 0-9, '_' and '-'")
+ tty.die("Package name can only contain a-z, 0-9, and '-'")
+
+ return name
+
+
+def get_url(args):
+ """Get the URL to use.
+
+ Use a default URL if none is provided.
+
+ :param argparse.Namespace args: The arguments given to ``spack create``
+
+ :returns: The URL of the package
+ :rtype: str
+ """
+
+ # Default URL
+ url = 'http://www.example.com/example-1.2.3.tar.gz'
+
+ if args.url:
+ # Use a user-supplied URL if one is present
+ url = args.url
+
+ return url
+
+
+def get_versions(args, name):
+ """Returns a list of versions and hashes for a package.
+
+ Also returns a BuildSystemGuesser object.
+
+ Returns default values if no URL is provided.
+
+ :param argparse.Namespace args: The arguments given to ``spack create``
+ :param str name: The name of the package
+
+ :returns: Versions and hashes, and a BuildSystemGuesser object
+ :rtype: str and BuildSystemGuesser
+ """
+
+ # Default version, hash, and guesser
+ versions = """\
+ # FIXME: Add proper versions and checksums here.
+ # version('1.2.3', '0123456789abcdef0123456789abcdef')"""
+
+ guesser = BuildSystemGuesser()
+
+ if args.url:
+ # Find available versions
+ url_dict = spack.util.web.find_versions_of_archive(args.url)
+
+ if not url_dict:
+ # If no versions were found, revert to what the user provided
+ version = spack.url.parse_version(args.url)
+ url_dict = {version: args.url}
+
+ versions = spack.cmd.checksum.get_checksums(
+ url_dict, name, first_stage_function=guesser,
+ keep_stage=args.keep_stage)
+
+ return versions, guesser
+
+
+def get_build_system(args, guesser):
+ """Determine the build system template.
- return name, version
+ If a template is specified, always use that. Otherwise, if a URL
+ is provided, download the tarball and peek inside to guess what
+ build system it uses. Otherwise, use a generic template by default.
+ :param argparse.Namespace args: The arguments given to ``spack create``
+ :param BuildSystemGuesser guesser: The first_stage_function given to \
+ ``spack checksum`` which records the build system it detects
-def find_repository(spec, args):
- # figure out namespace for spec
+ :returns: The name of the build system template to use
+ :rtype: str
+ """
+
+ # Default template
+ template = 'generic'
+
+ if args.template:
+ # Use a user-supplied template if one is present
+ template = args.template
+ tty.msg("Using specified package template: '{0}'".format(template))
+ elif args.url:
+ # Use whatever build system the guesser detected
+ template = guesser.build_system
+ if template == 'generic':
+ tty.warn("Unable to detect a build system. "
+ "Using a generic package template.")
+ else:
+ msg = "This package looks like it uses the {0} build system"
+ tty.msg(msg.format(template))
+
+ return template
+
+
+def get_repository(args, name):
+ """Returns a Repo object that will allow us to determine the path where
+ the new package file should be created.
+
+ :param argparse.Namespace args: The arguments given to ``spack create``
+ :param str name: The name of the package to create
+
+ :returns: A Repo object capable of determining the path to the package file
+ :rtype: Repo
+ """
+ spec = Spec(name)
+ # Figure out namespace for spec
if spec.namespace and args.namespace and spec.namespace != args.namespace:
- tty.die("Namespaces '%s' and '%s' do not match." % (spec.namespace, args.namespace))
+ tty.die("Namespaces '{0}' and '{1}' do not match.".format(
+ spec.namespace, args.namespace))
if not spec.namespace and args.namespace:
spec.namespace = args.namespace
- # Figure out where the new package should live.
+ # Figure out where the new package should live
repo_path = args.repo
if repo_path is not None:
- try:
- repo = Repo(repo_path)
- if spec.namespace and spec.namespace != repo.namespace:
- tty.die("Can't create package with namespace %s in repo with namespace %s"
- % (spec.namespace, repo.namespace))
- except RepoError as e:
- tty.die(str(e))
+ repo = Repo(repo_path)
+ if spec.namespace and spec.namespace != repo.namespace:
+ tty.die("Can't create package with namespace {0} in repo with "
+ "namespace {0}".format(spec.namespace, repo.namespace))
else:
if spec.namespace:
repo = spack.repo.get_repo(spec.namespace, None)
if not repo:
- tty.die("Unknown namespace: %s" % spec.namespace)
+ tty.die("Unknown namespace: '{0}'".format(spec.namespace))
else:
repo = spack.repo.first_repo()
@@ -212,91 +529,30 @@ def find_repository(spec, args):
return repo
-def fetch_tarballs(url, name, version):
- """Try to find versions of the supplied archive by scraping the web.
-
- Prompts the user to select how many to download if many are found.
-
-
- """
- versions = spack.util.web.find_versions_of_archive(url)
- rkeys = sorted(versions.keys(), reverse=True)
- versions = OrderedDict(zip(rkeys, (versions[v] for v in rkeys)))
-
- archives_to_fetch = 1
- if not versions:
- # If the fetch failed for some reason, revert to what the user provided
- versions = { version : url }
- elif len(versions) > 1:
- tty.msg("Found %s versions of %s:" % (len(versions), name),
- *spack.cmd.elide_list(
- ["%-10s%s" % (v,u) for v, u in versions.iteritems()]))
- print
- archives_to_fetch = tty.get_number(
- "Include how many checksums in the package file?",
- default=5, abort='q')
-
- if not archives_to_fetch:
- tty.die("Aborted.")
-
- sorted_versions = sorted(versions.keys(), reverse=True)
- sorted_urls = [versions[v] for v in sorted_versions]
- return sorted_versions[:archives_to_fetch], sorted_urls[:archives_to_fetch]
-
-
def create(parser, args):
- url = args.url
- if not url:
- setup_parser.subparser.print_help()
- return
-
- # Figure out a name and repo for the package.
- name, version = guess_name_and_version(url, args)
- spec = Spec(name)
- name = spec.name # factors out namespace, if any
- repo = find_repository(spec, args)
-
- tty.msg("This looks like a URL for %s version %s" % (name, version))
- tty.msg("Creating template for package %s" % name)
-
- # Fetch tarballs (prompting user if necessary)
- versions, urls = fetch_tarballs(url, name, version)
-
- # Try to guess what configure system is used.
- guesser = ConfigureGuesser()
- ver_hash_tuples = spack.cmd.checksum.get_checksums(
- versions, urls,
- first_stage_function=guesser,
- keep_stage=args.keep_stage)
-
- if not ver_hash_tuples:
- tty.die("Could not fetch any tarballs for %s" % name)
-
- # Prepend 'py-' to python package names, by convention.
- if guesser.build_system == 'python':
- name = 'py-%s' % name
-
- # Prepend 'r-' to R package names, by convention.
- if guesser.build_system == 'r':
- name = 'r-%s' % name
-
- # Create a directory for the new package.
- pkg_path = repo.filename_for_package_name(name)
+ # Gather information about the package to be created
+ name = get_name(args)
+ url = get_url(args)
+ versions, guesser = get_versions(args, name)
+ build_system = get_build_system(args, guesser)
+
+ # Create the package template object
+ PackageClass = templates[build_system]
+ package = PackageClass(name, url, versions)
+ tty.msg("Created template for {0} package".format(package.name))
+
+ # Create a directory for the new package
+ repo = get_repository(args, name)
+ pkg_path = repo.filename_for_package_name(package.name)
if os.path.exists(pkg_path) and not args.force:
- tty.die("%s already exists." % pkg_path)
+ tty.die('{0} already exists.'.format(pkg_path),
+ ' Try running `spack create --force` to overwrite it.')
else:
mkdirp(os.path.dirname(pkg_path))
- # Write out a template for the file
- with open(pkg_path, "w") as pkg_file:
- pkg_file.write(
- package_template.substitute(
- name=name,
- configure=guesser.configure,
- class_name=mod_to_class(name),
- url=url,
- versions=make_version_calls(ver_hash_tuples)))
-
- # If everything checks out, go ahead and edit.
+ # Write the new package file
+ package.write(pkg_path)
+ tty.msg("Created package file: {0}".format(pkg_path))
+
+ # Open up the new package file in your $EDITOR
spack.editor(pkg_path)
- tty.msg("Created package %s" % pkg_path)
diff --git a/lib/spack/spack/cmd/deactivate.py b/lib/spack/spack/cmd/deactivate.py
index 990309ee48..fedd078972 100644
--- a/lib/spack/spack/cmd/deactivate.py
+++ b/lib/spack/spack/cmd/deactivate.py
@@ -27,10 +27,12 @@ import llnl.util.tty as tty
import spack
import spack.cmd
+import spack.store
from spack.graph import topological_sort
description = "Deactivate a package extension."
+
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true',
@@ -40,7 +42,8 @@ def setup_parser(subparser):
help="Deactivate all extensions of an extendable package, or "
"deactivate an extension AND its dependencies.")
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="spec of package extension to deactivate.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="spec of package extension to deactivate.")
def deactivate(parser, args):
@@ -54,7 +57,7 @@ def deactivate(parser, args):
if args.all:
if pkg.extendable:
tty.msg("Deactivating all extensions of %s" % pkg.spec.short_spec)
- ext_pkgs = spack.installed_db.installed_extensions_for(spec)
+ ext_pkgs = spack.store.db.installed_extensions_for(spec)
for ext_pkg in ext_pkgs:
ext_pkg.spec.normalize()
@@ -65,7 +68,8 @@ def deactivate(parser, args):
if not args.force and not spec.package.activated:
tty.die("%s is not activated." % pkg.spec.short_spec)
- tty.msg("Deactivating %s and all dependencies." % pkg.spec.short_spec)
+ tty.msg("Deactivating %s and all dependencies." %
+ pkg.spec.short_spec)
topo_order = topological_sort(spec)
index = spec.index()
@@ -79,7 +83,9 @@ def deactivate(parser, args):
epkg.do_deactivate(force=args.force)
else:
- tty.die("spack deactivate --all requires an extendable package or an extension.")
+ tty.die(
+ "spack deactivate --all requires an extendable package "
+ "or an extension.")
else:
if not pkg.is_extension:
diff --git a/lib/spack/spack/cmd/debug.py b/lib/spack/spack/cmd/debug.py
new file mode 100644
index 0000000000..c7e90cb210
--- /dev/null
+++ b/lib/spack/spack/cmd/debug.py
@@ -0,0 +1,100 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import re
+from datetime import datetime
+from glob import glob
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import working_dir
+
+import spack
+from spack.util.executable import which
+
+description = "Debugging commands for troubleshooting Spack."
+
+
+def setup_parser(subparser):
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='debug_command')
+ sp.add_parser('create-db-tarball',
+ help="Create a tarball of Spack's installation metadata.")
+
+
+def _debug_tarball_suffix():
+ now = datetime.now()
+ suffix = now.strftime('%Y-%m-%d-%H%M%S')
+
+ git = which('git')
+ if not git:
+ return 'nobranch-nogit-%s' % suffix
+
+ with working_dir(spack.spack_root):
+ if not os.path.isdir('.git'):
+ return 'nobranch.nogit.%s' % suffix
+
+ # Get symbolic branch name and strip any special chars (mainly '/')
+ symbolic = git(
+ 'rev-parse', '--abbrev-ref', '--short', 'HEAD', output=str).strip()
+ symbolic = re.sub(r'[^\w.-]', '-', symbolic)
+
+ # Get the commit hash too.
+ commit = git(
+ 'rev-parse', '--short', 'HEAD', output=str).strip()
+
+ if symbolic == commit:
+ return "nobranch.%s.%s" % (commit, suffix)
+ else:
+ return "%s.%s.%s" % (symbolic, commit, suffix)
+
+
+def create_db_tarball(args):
+ tar = which('tar')
+ tarball_name = "spack-db.%s.tar.gz" % _debug_tarball_suffix()
+ tarball_path = os.path.abspath(tarball_name)
+
+ base = os.path.basename(spack.store.root)
+ transform_args = []
+ if 'GNU' in tar('--version', output=str):
+ transform_args = ['--transform', 's/^%s/%s/' % (base, tarball_name)]
+ else:
+ transform_args = ['-s', '/^%s/%s/' % (base, tarball_name)]
+
+ wd = os.path.dirname(spack.store.root)
+ with working_dir(wd):
+ files = [spack.store.db._index_path]
+ files += glob('%s/*/*/*/.spack/spec.yaml' % base)
+ files = [os.path.relpath(f) for f in files]
+
+ args = ['-czf', tarball_path]
+ args += transform_args
+ args += files
+ tar(*args)
+
+ tty.msg('Created %s' % tarball_name)
+
+
+def debug(parser, args):
+ action = {'create-db-tarball': create_db_tarball}
+ action[args.debug_command](args)
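
For reference, the GNU-vs-BSD branch in create_db_tarball() above comes down to which flag the local tar uses to rewrite archive member names; a standalone sketch (assumes a tar binary on PATH)::

    import subprocess

    def path_rewrite_args(old, new):
        # GNU tar spells path rewriting --transform; BSD tar spells it -s.
        version = subprocess.check_output(['tar', '--version']).decode('utf-8', 'replace')
        if 'GNU' in version:
            return ['--transform', 's/^%s/%s/' % (old, new)]
        return ['-s', '/^%s/%s/' % (old, new)]

    # e.g.: ['tar', '-czf', 'spack-db.tar.gz'] + path_rewrite_args('opt', 'spack-db') + ['opt']
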
diff --git a/lib/spack/spack/cmd/dependents.py b/lib/spack/spack/cmd/dependents.py
index 78eb6847b8..dc2ee658ac 100644
--- a/lib/spack/spack/cmd/dependents.py
+++ b/lib/spack/spack/cmd/dependents.py
@@ -27,20 +27,27 @@ import argparse
import llnl.util.tty as tty
import spack
+import spack.store
import spack.cmd
description = "Show installed packages that depend on another."
+
def setup_parser(subparser):
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="specs to list dependencies of.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="specs to list dependencies of.")
def dependents(parser, args):
- specs = spack.cmd.parse_specs(args.spec, concretize=True)
+ specs = spack.cmd.parse_specs(args.spec)
if len(specs) != 1:
tty.die("spack dependents takes only one spec.")
+ spec = spack.cmd.disambiguate_spec(specs[0])
- fmt = '$_$@$%@$+$=$#'
- deps = [d.format(fmt, color=True) for d in specs[0].package.installed_dependents]
- tty.msg("Dependents of %s" % specs[0].format(fmt, color=True), *deps)
+ tty.msg("Dependents of %s" % spec.format('$_$@$%@$#', color=True))
+ deps = spack.store.db.installed_dependents(spec)
+ if deps:
+ spack.cmd.display_specs(deps)
+ else:
+ print "No dependents"
diff --git a/lib/spack/spack/cmd/diy.py b/lib/spack/spack/cmd/diy.py
index 39faf59a17..dbb5a253ec 100644
--- a/lib/spack/spack/cmd/diy.py
+++ b/lib/spack/spack/cmd/diy.py
@@ -30,11 +30,12 @@ import llnl.util.tty as tty
import spack
import spack.cmd
-from spack.cmd.edit import edit_package
+import spack.cmd.common.arguments as arguments
from spack.stage import DIYStage
description = "Do-It-Yourself: build from an existing source directory."
+
def setup_parser(subparser):
subparser.add_argument(
'-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
@@ -50,7 +51,10 @@ def setup_parser(subparser):
help="Do not display verbose build output while installing.")
subparser.add_argument(
'spec', nargs=argparse.REMAINDER,
- help="specs to use for install. Must contain package AND verison.")
+ help="specs to use for install. Must contain package AND version.")
+
+ cd_group = subparser.add_mutually_exclusive_group()
+ arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
def diy(self, args):
@@ -61,39 +65,33 @@ def diy(self, args):
if len(specs) > 1:
tty.die("spack diy only takes one spec.")
- # Take a write lock before checking for existence.
- with spack.installed_db.write_transaction():
- spec = specs[0]
- if not spack.repo.exists(spec.name):
- tty.warn("No such package: %s" % spec.name)
- create = tty.get_yes_or_no("Create this package?", default=False)
- if not create:
- tty.msg("Exiting without creating.")
- sys.exit(1)
- else:
- tty.msg("Running 'spack edit -f %s'" % spec.name)
- edit_package(spec.name, spack.repo.first_repo(), None, True)
- return
-
- if not spec.versions.concrete:
- tty.die("spack diy spec must have a single, concrete version. Did you forget a package version number?")
-
- spec.concretize()
- package = spack.repo.get(spec)
-
- if package.installed:
- tty.error("Already installed in %s" % package.prefix)
- tty.msg("Uninstall or try adding a version suffix for this DIY build.")
- sys.exit(1)
-
- # Forces the build to run out of the current directory.
- package.stage = DIYStage(os.getcwd())
-
- # TODO: make this an argument, not a global.
- spack.do_checksum = False
-
- package.do_install(
- keep_prefix=args.keep_prefix,
- ignore_deps=args.ignore_deps,
- verbose=not args.quiet,
- keep_stage=True) # don't remove source dir for DIY.
+ spec = specs[0]
+ if not spack.repo.exists(spec.name):
+ tty.die("No package for '{0}' was found.".format(spec.name),
+ " Use `spack create` to create a new package")
+
+ if not spec.versions.concrete:
+ tty.die(
+ "spack diy spec must have a single, concrete version. "
+ "Did you forget a package version number?")
+
+ spec.concretize()
+ package = spack.repo.get(spec)
+
+ if package.installed:
+ tty.error("Already installed in %s" % package.prefix)
+ tty.msg("Uninstall or try adding a version suffix for this DIY build.")
+ sys.exit(1)
+
+ # Forces the build to run out of the current directory.
+ package.stage = DIYStage(os.getcwd())
+
+ # TODO: make this an argument, not a global.
+ spack.do_checksum = False
+
+ package.do_install(
+ keep_prefix=args.keep_prefix,
+ install_deps=not args.ignore_deps,
+ verbose=not args.quiet,
+ keep_stage=True, # don't remove source dir for DIY.
+ dirty=args.dirty)
diff --git a/lib/spack/spack/cmd/doc.py b/lib/spack/spack/cmd/doc.py
index b3d0737d13..291b17216f 100644
--- a/lib/spack/spack/cmd/doc.py
+++ b/lib/spack/spack/cmd/doc.py
@@ -25,6 +25,7 @@
description = "Run pydoc from within spack."
+
def setup_parser(subparser):
subparser.add_argument('entity', help="Run pydoc help on entity")
diff --git a/lib/spack/spack/cmd/edit.py b/lib/spack/spack/cmd/edit.py
index 49ab83867a..77f23333b6 100644
--- a/lib/spack/spack/cmd/edit.py
+++ b/lib/spack/spack/cmd/edit.py
@@ -23,39 +23,26 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import string
import llnl.util.tty as tty
-from llnl.util.filesystem import mkdirp, join_path
+from llnl.util.filesystem import join_path
import spack
import spack.cmd
from spack.spec import Spec
from spack.repository import Repo
-from spack.util.naming import mod_to_class
description = "Open package files in $EDITOR"
-# When -f is supplied, we'll create a very minimal skeleton.
-package_template = string.Template("""\
-from spack import *
-class ${class_name}(Package):
- ""\"Description""\"
+def edit_package(name, repo_path, namespace):
+ """Opens the requested package file in your favorite $EDITOR.
- homepage = "http://www.example.com"
- url = "http://www.example.com/${name}-1.0.tar.gz"
-
- version('1.0', '0123456789abcdef0123456789abcdef')
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
-""")
-
-
-def edit_package(name, repo_path, namespace, force=False):
+ :param str name: The name of the package
+ :param str repo_path: The path to the repository containing this package
+ :param str namespace: A valid namespace registered with Spack
+ """
+ # Find the location of the package
if repo_path:
repo = Repo(repo_path)
elif namespace:
@@ -67,64 +54,63 @@ def edit_package(name, repo_path, namespace, force=False):
spec = Spec(name)
if os.path.exists(path):
if not os.path.isfile(path):
- tty.die("Something's wrong. '%s' is not a file!" % path)
- if not os.access(path, os.R_OK|os.W_OK):
+ tty.die("Something is wrong. '{0}' is not a file!".format(path))
+ if not os.access(path, os.R_OK | os.W_OK):
tty.die("Insufficient permissions on '%s'!" % path)
- elif not force:
- tty.die("No package '%s'. Use spack create, or supply -f/--force "
- "to edit a new file." % spec.name)
else:
- mkdirp(os.path.dirname(path))
- with open(path, "w") as pkg_file:
- pkg_file.write(
- package_template.substitute(
- name=spec.name, class_name=mod_to_class(spec.name)))
+ tty.die("No package for '{0}' was found.".format(spec.name),
+ " Use `spack create` to create a new package")
spack.editor(path)
def setup_parser(subparser):
- subparser.add_argument(
- '-f', '--force', dest='force', action='store_true',
- help="Open a new file in $EDITOR even if package doesn't exist.")
-
excl_args = subparser.add_mutually_exclusive_group()
- # Various filetypes you can edit directly from the cmd line.
+ # Various types of Spack files that can be edited
+ # Edits package files by default
excl_args.add_argument(
'-c', '--command', dest='path', action='store_const',
- const=spack.cmd.command_path, help="Edit the command with the supplied name.")
+ const=spack.cmd.command_path,
+ help="Edit the command with the supplied name.")
excl_args.add_argument(
'-t', '--test', dest='path', action='store_const',
- const=spack.test_path, help="Edit the test with the supplied name.")
+ const=spack.test_path,
+ help="Edit the test with the supplied name.")
excl_args.add_argument(
'-m', '--module', dest='path', action='store_const',
- const=spack.module_path, help="Edit the main spack module with the supplied name.")
+ const=spack.module_path,
+ help="Edit the main spack module with the supplied name.")
# Options for editing packages
excl_args.add_argument(
- '-r', '--repo', default=None, help="Path to repo to edit package in.")
+ '-r', '--repo', default=None,
+ help="Path to repo to edit package in.")
excl_args.add_argument(
- '-N', '--namespace', default=None, help="Namespace of package to edit.")
+ '-N', '--namespace', default=None,
+ help="Namespace of package to edit.")
subparser.add_argument(
- 'name', nargs='?', default=None, help="name of package to edit")
+ 'name', nargs='?', default=None,
+ help="name of package to edit")
def edit(parser, args):
name = args.name
+ # By default, edit package files
path = spack.packages_path
+
+ # If `--command`, `--test`, or `--module` is chosen, edit those instead
if args.path:
path = args.path
if name:
path = join_path(path, name + ".py")
- if not args.force and not os.path.exists(path):
- tty.die("No command named '%s'." % name)
+ if not os.path.exists(path):
+ tty.die("No command for '{0}' was found.".format(name))
spack.editor(path)
-
elif name:
- edit_package(name, args.repo, args.namespace, args.force)
+ edit_package(name, args.repo, args.namespace)
else:
- # By default open the directory where packages or commands live.
+ # By default open the directory where packages live
spack.editor(path)
diff --git a/lib/spack/spack/cmd/env.py b/lib/spack/spack/cmd/env.py
index 85d111e91e..f3bad039d4 100644
--- a/lib/spack/spack/cmd/env.py
+++ b/lib/spack/spack/cmd/env.py
@@ -28,11 +28,13 @@ import llnl.util.tty as tty
import spack.cmd
import spack.build_environment as build_env
-description = "Run a command with the environment for a particular spec's install."
+description = "Run a command with the install environment for a spec."
+
def setup_parser(subparser):
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="specs of package environment to emulate.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="specs of package environment to emulate.")
def env(parser, args):
@@ -47,7 +49,7 @@ def env(parser, args):
if sep in args.spec:
s = args.spec.index(sep)
spec = args.spec[:s]
- cmd = args.spec[s+1:]
+ cmd = args.spec[s + 1:]
else:
spec = args.spec[0]
cmd = args.spec[1:]
diff --git a/lib/spack/spack/cmd/extensions.py b/lib/spack/spack/cmd/extensions.py
index 11659e0c96..bd149044ca 100644
--- a/lib/spack/spack/cmd/extensions.py
+++ b/lib/spack/spack/cmd/extensions.py
@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
import argparse
import llnl.util.tty as tty
@@ -31,9 +30,11 @@ from llnl.util.tty.colify import colify
import spack
import spack.cmd
import spack.cmd.find
+import spack.store
description = "List extensions for package."
+
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
format_group.add_argument(
@@ -47,7 +48,8 @@ def setup_parser(subparser):
help='Show full dependency DAG of extensions')
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to list extensions for')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to list extensions for')
def extensions(parser, args):
@@ -85,7 +87,9 @@ def extensions(parser, args):
#
# List specs of installed extensions.
#
- installed = [s.spec for s in spack.installed_db.installed_extensions_for(spec)]
+ installed = [s.spec
+ for s in spack.store.db.installed_extensions_for(spec)]
+
print
if not installed:
tty.msg("None installed.")
@@ -96,10 +100,11 @@ def extensions(parser, args):
#
# List specs of activated extensions.
#
- activated = spack.install_layout.extension_map(spec)
+ activated = spack.store.layout.extension_map(spec)
print
if not activated:
tty.msg("None activated.")
return
tty.msg("%d currently activated:" % len(activated))
- spack.cmd.find.display_specs(activated.values(), mode=args.mode, long=args.long)
+ spack.cmd.find.display_specs(
+ activated.values(), mode=args.mode, long=args.long)
diff --git a/lib/spack/spack/cmd/fetch.py b/lib/spack/spack/cmd/fetch.py
index e40caaa234..c1ac2ed48d 100644
--- a/lib/spack/spack/cmd/fetch.py
+++ b/lib/spack/spack/cmd/fetch.py
@@ -29,16 +29,21 @@ import spack.cmd
description = "Fetch archives for packages"
+
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check packages against checksum")
subparser.add_argument(
- '-m', '--missing', action='store_true', help="Also fetch all missing dependencies")
+ '-m', '--missing', action='store_true',
+ help="Also fetch all missing dependencies")
subparser.add_argument(
- '-D', '--dependencies', action='store_true', help="Also fetch all dependencies")
+ '-D', '--dependencies', action='store_true',
+ help="Also fetch all dependencies")
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to fetch")
+ 'packages', nargs=argparse.REMAINDER,
+ help="specs of packages to fetch")
+
def fetch(parser, args):
if not args.packages:
@@ -50,8 +55,7 @@ def fetch(parser, args):
specs = spack.cmd.parse_specs(args.packages, concretize=True)
for spec in specs:
if args.missing or args.dependencies:
- to_fetch = set()
- for s in spec.traverse():
+ for s in spec.traverse(deptype_query=spack.alldeps):
package = spack.repo.get(s)
if args.missing and package.installed:
continue
diff --git a/lib/spack/spack/cmd/find.py b/lib/spack/spack/cmd/find.py
index 9bcbf8d376..ecd6ae2822 100644
--- a/lib/spack/spack/cmd/find.py
+++ b/lib/spack/spack/cmd/find.py
@@ -23,144 +23,77 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import sys
-import collections
-import itertools
-import argparse
-from StringIO import StringIO
import llnl.util.tty as tty
-from llnl.util.tty.colify import *
-from llnl.util.tty.color import *
-from llnl.util.lang import *
+import spack.cmd.common.arguments as arguments
-import spack
-import spack.spec
+from spack.cmd import display_specs
+
+description = "Find installed spack packages"
-description ="Find installed spack packages"
def setup_parser(subparser):
format_group = subparser.add_mutually_exclusive_group()
+ format_group.add_argument('-s', '--short',
+ action='store_const',
+ dest='mode',
+ const='short',
+ default='short',
+ help='Show only specs (default)')
+ format_group.add_argument('-p', '--paths',
+ action='store_const',
+ dest='mode',
+ const='paths',
+ help='Show paths to package install directories')
format_group.add_argument(
- '-s', '--short', action='store_const', dest='mode', const='short',
- help='Show only specs (default)')
- format_group.add_argument(
- '-p', '--paths', action='store_const', dest='mode', const='paths',
- help='Show paths to package install directories')
- format_group.add_argument(
- '-d', '--deps', action='store_const', dest='mode', const='deps',
+ '-d', '--deps',
+ action='store_const',
+ dest='mode',
+ const='deps',
help='Show full dependency DAG of installed packages')
- subparser.add_argument(
- '-l', '--long', action='store_true',
- help='Show dependency hashes as well as versions.')
- subparser.add_argument(
- '-L', '--very-long', action='store_true',
- help='Show dependency hashes as well as versions.')
+ arguments.add_common_arguments(subparser, ['long', 'very_long'])
- subparser.add_argument(
- '-e', '--explicit', action='store_true',
+ subparser.add_argument('-f', '--show-flags',
+ action='store_true',
+ dest='show_flags',
+ help='Show spec compiler flags.')
+ implicit_explicit = subparser.add_mutually_exclusive_group()
+ implicit_explicit.add_argument(
+ '-e', '--explicit',
+ action='store_true',
help='Show only specs that were installed explicitly')
- subparser.add_argument(
- '-E', '--implicit', action='store_true',
+ implicit_explicit.add_argument(
+ '-E', '--implicit',
+ action='store_true',
help='Show only specs that were installed as dependencies')
subparser.add_argument(
- '-u', '--unknown', action='store_true',
+ '-u', '--unknown',
+ action='store_true',
+ dest='unknown',
help='Show only specs Spack does not have a package for.')
subparser.add_argument(
- '-m', '--missing', action='store_true',
+ '-m', '--missing',
+ action='store_true',
+ dest='missing',
help='Show missing dependencies as well as installed specs.')
subparser.add_argument(
- '-M', '--only-missing', action='store_true',
- help='Show only missing dependencies.')
- subparser.add_argument(
- '-N', '--namespace', action='store_true',
- help='Show fully qualified package names.')
-
- subparser.add_argument(
- 'query_specs', nargs=argparse.REMAINDER,
- help='optional specs to filter results')
-
-
-def gray_hash(spec, length):
- return colorize('@K{%s}' % spec.dag_hash(length))
-
-
-def display_specs(specs, **kwargs):
- mode = kwargs.get('mode', 'short')
- hashes = kwargs.get('long', False)
- namespace = kwargs.get('namespace', False)
-
- hlen = 7
- if kwargs.get('very_long', False):
- hashes = True
- hlen = None
-
- # Make a dict with specs keyed by architecture and compiler.
- index = index_by(specs, ('architecture', 'compiler'))
-
- # Traverse the index and print out each package
- for i, (architecture, compiler) in enumerate(sorted(index)):
- if i > 0: print
-
- header = "%s{%s} / %s{%s}" % (
- spack.spec.architecture_color, architecture,
- spack.spec.compiler_color, compiler)
- tty.hline(colorize(header), char='-')
-
- specs = index[(architecture,compiler)]
- specs.sort()
-
- nfmt = '.' if namespace else '_'
- abbreviated = [s.format('$%s$@$+' % nfmt, color=True) for s in specs]
- if mode == 'paths':
- # Print one spec per line along with prefix path
- width = max(len(s) for s in abbreviated)
- width += 2
- format = " %%-%ds%%s" % width
-
- for abbrv, spec in zip(abbreviated, specs):
- if hashes:
- print gray_hash(spec, hlen),
- print format % (abbrv, spec.prefix)
-
- elif mode == 'deps':
- for spec in specs:
- print spec.tree(
- format='$%s$@$+' % nfmt,
- color=True,
- indent=4,
- prefix=(lambda s: gray_hash(s, hlen)) if hashes else None)
-
- elif mode == 'short':
- def fmt(s):
- string = ""
- if hashes:
- string += gray_hash(s, hlen) + ' '
- string += s.format('$-%s$@$+' % nfmt, color=True)
-
- return string
- colify(fmt(s) for s in specs)
-
- else:
- raise ValueError(
- "Invalid mode for display_specs: %s. Must be one of (paths, deps, short)." % mode)
-
-
-
-def find(parser, args):
- # Filter out specs that don't exist.
- query_specs = spack.cmd.parse_specs(args.query_specs)
- query_specs, nonexisting = partition_list(
- query_specs, lambda s: spack.repo.exists(s.name))
-
- if nonexisting:
- msg = "No such package%s: " % ('s' if len(nonexisting) > 1 else '')
- msg += ", ".join(s.name for s in nonexisting)
- tty.msg(msg)
-
- if not query_specs:
- return
-
+ '-v', '--variants',
+ action='store_true',
+ dest='variants',
+ help='Show variants in output (can be long)')
+ subparser.add_argument('-M', '--only-missing',
+ action='store_true',
+ dest='only_missing',
+ help='Show only missing dependencies.')
+ subparser.add_argument('-N', '--namespace',
+ action='store_true',
+ help='Show fully qualified package names.')
+
+ arguments.add_common_arguments(subparser, ['constraint'])
+
+
+def query_arguments(args):
# Set up query arguments.
installed, known = True, any
if args.only_missing:
@@ -169,28 +102,34 @@ def find(parser, args):
installed = any
if args.unknown:
known = False
-
explicit = any
if args.explicit:
- explicit = False
- if args.implicit:
explicit = True
+ if args.implicit:
+ explicit = False
+ q_args = {'installed': installed, 'known': known, "explicit": explicit}
+ return q_args
- q_args = { 'installed' : installed, 'known' : known, "explicit" : explicit }
- # Get all the specs the user asked for
- if not query_specs:
- specs = set(spack.installed_db.query(**q_args))
- else:
- results = [set(spack.installed_db.query(qs, **q_args)) for qs in query_specs]
- specs = set.union(*results)
+def find(parser, args):
+ q_args = query_arguments(args)
+ query_specs = args.specs(**q_args)
- if not args.mode:
- args.mode = 'short'
+ # Exit early if no package matches the constraint
+ if not query_specs and args.constraint:
+ msg = "No package matches the query: {0}".format(
+ ' '.join(args.constraint))
+ tty.msg(msg)
+ return
+ # Display the result
if sys.stdout.isatty():
- tty.msg("%d installed packages." % len(specs))
- display_specs(specs, mode=args.mode,
+ tty.msg("%d installed packages." % len(query_specs))
+
+ display_specs(query_specs,
+ mode=args.mode,
long=args.long,
very_long=args.very_long,
- namespace=args.namespace)
+ show_flags=args.show_flags,
+ namespace=args.namespace,
+ variants=args.variants)
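
The swapped --explicit/--implicit handling in query_arguments() above is easiest to see as a small truth table; a quick standalone check with made-up argparse namespaces::

    from argparse import Namespace

    def explicit_value(args):
        explicit = any        # the built-in doubles as a "don't filter" sentinel
        if args.explicit:
            explicit = True   # keep only explicitly installed specs
        if args.implicit:
            explicit = False  # keep only specs installed as dependencies
        return explicit

    print(explicit_value(Namespace(explicit=True, implicit=False)))   # True
    print(explicit_value(Namespace(explicit=False, implicit=True)))   # False
    print(explicit_value(Namespace(explicit=False, implicit=False)))  # built-in any
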
diff --git a/lib/spack/spack/cmd/flake8.py b/lib/spack/spack/cmd/flake8.py
new file mode 100644
index 0000000000..b8e28b0860
--- /dev/null
+++ b/lib/spack/spack/cmd/flake8.py
@@ -0,0 +1,218 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import re
+import os
+import sys
+import shutil
+import tempfile
+import argparse
+
+from llnl.util.filesystem import *
+
+import spack
+from spack.util.executable import *
+
+description = "Runs source code style checks on Spack. Requires flake8."
+flake8 = None
+include_untracked = True
+
+"""List of directories to exclude from checks."""
+exclude_directories = [spack.external_path]
+
+"""
+This is a dict that maps:
+ filename pattern ->
+ a flake8 exemption code ->
+ list of patterns, for which matching lines should have codes applied.
+"""
+exemptions = {
+ # exemptions applied only to package.py files.
+ r'package.py$': {
+ # Exempt lines with urls and descriptions from overlong line errors.
+ 501: [r'^\s*homepage\s*=',
+ r'^\s*url\s*=',
+ r'^\s*git\s*=',
+ r'^\s*svn\s*=',
+ r'^\s*hg\s*=',
+ r'^\s*version\(.*\)',
+ r'^\s*variant\(.*\)',
+ r'^\s*depends_on\(.*\)',
+ r'^\s*extends\(.*\)'],
+ # Exempt '@when' decorated functions from redefinition errors.
+ 811: [r'^\s*\@when\(.*\)'],
+ },
+
+ # exemptions applied to all files.
+ r'.py$': {
+ # Exempt lines with URLs from overlong line errors.
+ 501: [r'(https?|file)\:']
+ },
+}
+
+# compile all regular expressions.
+exemptions = dict((re.compile(file_pattern),
+ dict((code, [re.compile(p) for p in patterns])
+ for code, patterns in error_dict.items()))
+ for file_pattern, error_dict in exemptions.items())
+
+
+def changed_files():
+ """Get list of changed files in the Spack repository."""
+
+ git = which('git', required=True)
+
+ git_args = [
+ # Add changed files committed since branching off of develop
+ ['diff', '--name-only', '--diff-filter=ACMR', 'develop'],
+ # Add changed files that have been staged but not yet committed
+ ['diff', '--name-only', '--diff-filter=ACMR', '--cached'],
+ # Add changed files that are unstaged
+ ['diff', '--name-only', '--diff-filter=ACMR']]
+
+ # Add new files that are untracked
+ if include_untracked:
+ git_args.append(['ls-files', '--exclude-standard', '--other'])
+
+ excludes = [os.path.realpath(f) for f in exclude_directories]
+ changed = set()
+ for git_arg_list in git_args:
+ arg_list = git_arg_list + ['--', '*.py']
+
+ files = [f for f in git(*arg_list, output=str).split('\n') if f]
+ for f in files:
+ # don't look at files that are in the exclude locations
+ if any(os.path.realpath(f).startswith(e) for e in excludes):
+ continue
+ changed.add(f)
+ return sorted(changed)
+
+
+def filter_file(source, dest, output=False):
+ """Filter a single file through all the patterns in exemptions."""
+ with open(source) as infile:
+ parent = os.path.dirname(dest)
+ mkdirp(parent)
+
+ with open(dest, 'w') as outfile:
+ for line in infile:
+ line = line.rstrip()
+
+ for file_pattern, errors in exemptions.items():
+ if not file_pattern.search(source):
+ continue
+
+ for code, patterns in errors.items():
+ for pattern in patterns:
+ if pattern.search(line):
+ line += (" # NOQA: ignore=%d" % code)
+ break
+
+ oline = line + '\n'
+ outfile.write(oline)
+ if output:
+ sys.stdout.write(oline)
+
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-k', '--keep-temp', action='store_true',
+ help="Do not delete temporary directory where flake8 runs. "
+ "Use for debugging, to see filtered files.")
+ subparser.add_argument(
+ '-o', '--output', action='store_true',
+ help="Send filtered files to stdout as well as temp files.")
+ subparser.add_argument(
+ '-r', '--root-relative', action='store_true', default=False,
+ help="print root-relative paths (default is cwd-relative)")
+ subparser.add_argument(
+ '-U', '--no-untracked', dest='untracked', action='store_false',
+ default=True, help="Exclude untracked files from checks.")
+ subparser.add_argument(
+ 'files', nargs=argparse.REMAINDER, help="specific files to check")
+
+
+def flake8(parser, args):
+ # Just use this to check for flake8 -- we actually execute it with Popen.
+ global flake8, include_untracked
+ flake8 = which('flake8', required=True)
+ include_untracked = args.untracked
+
+ temp = tempfile.mkdtemp()
+ try:
+ file_list = args.files
+ if file_list:
+ def prefix_relative(path):
+ return os.path.relpath(
+ os.path.abspath(os.path.realpath(path)), spack.prefix)
+
+ file_list = [prefix_relative(p) for p in file_list]
+
+ with working_dir(spack.prefix):
+ if not file_list:
+ file_list = changed_files()
+ shutil.copy('.flake8', os.path.join(temp, '.flake8'))
+
+ print '======================================================='
+ print 'flake8: running flake8 code checks on spack.'
+ print
+ print 'Modified files:'
+ for filename in file_list:
+ print " %s" % filename.strip()
+ print('=======================================================')
+
+ # filter files into a temporary directory with exemptions added.
+ for filename in file_list:
+ src_path = os.path.join(spack.prefix, filename)
+ dest_path = os.path.join(temp, filename)
+ filter_file(src_path, dest_path, args.output)
+
+ # run flake8 on the temporary tree.
+ with working_dir(temp):
+ output = flake8('--format', 'pylint', *file_list,
+ fail_on_error=False, output=str)
+
+ if args.root_relative:
+ # print results relative to repo root.
+ print output
+ else:
+ # print results relative to current working directory
+ def cwd_relative(path):
+ return '%s: [' % os.path.relpath(
+ os.path.join(spack.prefix, path.group(1)), os.getcwd())
+
+ for line in output.split('\n'):
+ print re.sub(r'^(.*): \[', cwd_relative, line)
+
+ if flake8.returncode != 0:
+ print "Flake8 found errors."
+ sys.exit(1)
+ else:
+ print "Flake8 checks were clean."
+
+ finally:
+ if args.keep_temp:
+ print "temporary files are in ", temp
+ else:
+ shutil.rmtree(temp, ignore_errors=True)
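
The exemption machinery in filter_file() above boils down to appending an inline NOQA marker to any line that matches an exempted pattern; a minimal standalone sketch using one entry from the table::

    import re

    exemptions = {501: [re.compile(r'(https?|file)\:')]}

    def add_noqa(line):
        line = line.rstrip()
        for code, patterns in exemptions.items():
            if any(p.search(line) for p in patterns):
                line += ' # NOQA: ignore=%d' % code   # flake8 then skips this line
                break
        return line

    print(add_noqa("url = 'https://example.com/downloads/some-very-long-tarball-1.0.tar.gz'"))
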
diff --git a/lib/spack/spack/cmd/graph.py b/lib/spack/spack/cmd/graph.py
index da65121836..6a268e6961 100644
--- a/lib/spack/spack/cmd/graph.py
+++ b/lib/spack/spack/cmd/graph.py
@@ -24,43 +24,77 @@
##############################################################################
import argparse
+import llnl.util.tty as tty
+
import spack
import spack.cmd
+import spack.store
+from spack.spec import *
from spack.graph import *
description = "Generate graphs of package dependency relationships."
+
def setup_parser(subparser):
setup_parser.parser = subparser
method = subparser.add_mutually_exclusive_group()
method.add_argument(
- '--ascii', action='store_true',
+ '-a', '--ascii', action='store_true',
help="Draw graph as ascii to stdout (default).")
method.add_argument(
- '--dot', action='store_true',
+ '-d', '--dot', action='store_true',
help="Generate graph in dot format and print to stdout.")
subparser.add_argument(
- '--concretize', action='store_true', help="Concretize specs before graphing.")
+ '-n', '--normalize', action='store_true',
+ help="Skip concretization; only print normalized spec.")
+
+ subparser.add_argument(
+ '-s', '--static', action='store_true',
+ help="Use static information from packages, not dynamic spec info.")
+
+ subparser.add_argument(
+ '-i', '--installed', action='store_true',
+ help="Graph all installed specs in dot format (implies --dot).")
+
+ subparser.add_argument(
+ '-t', '--deptype', action='store',
+ help="Comma-separated list of deptypes to traverse. default=%s."
+ % ','.join(alldeps))
subparser.add_argument(
- 'specs', nargs=argparse.REMAINDER, help="specs of packages to graph.")
+ 'specs', nargs=argparse.REMAINDER,
+ help="specs of packages to graph.")
def graph(parser, args):
- specs = spack.cmd.parse_specs(
- args.specs, normalize=True, concretize=args.concretize)
+ concretize = not args.normalize
+ if args.installed:
+ if args.specs:
+ tty.die("Can't specify specs with --installed")
+ args.dot = True
+ specs = spack.store.db.query()
+
+ else:
+ specs = spack.cmd.parse_specs(
+ args.specs, normalize=True, concretize=concretize)
if not specs:
setup_parser.parser.print_help()
return 1
- if args.dot: # Dot graph only if asked for.
- graph_dot(*specs)
+ deptype = alldeps
+ if args.deptype:
+ deptype = tuple(args.deptype.split(','))
+ validate_deptype(deptype)
+ deptype = canonical_deptype(deptype)
+
+ if args.dot: # Dot graph only if asked for.
+ graph_dot(specs, static=args.static, deptype=deptype)
- elif specs: # ascii is default: user doesn't need to provide it explicitly
- graph_ascii(specs[0], debug=spack.debug)
+ elif specs: # ascii is default: user doesn't need to provide it explicitly
+ graph_ascii(specs[0], debug=spack.debug, deptype=deptype)
for spec in specs[1:]:
- print # extra line bt/w independent graphs
+ print # extra line bt/w independent graphs
graph_ascii(spec, debug=spack.debug)
diff --git a/lib/spack/spack/cmd/help.py b/lib/spack/spack/cmd/help.py
index 1d23161839..5bc8fc3e74 100644
--- a/lib/spack/spack/cmd/help.py
+++ b/lib/spack/spack/cmd/help.py
@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-
description = "Get help on spack and its commands"
+
def setup_parser(subparser):
subparser.add_argument('help_command', nargs='?', default=None,
help='command to get help on')
+
def help(parser, args):
if args.help_command:
parser.parse_args([args.help_command, '-h'])
diff --git a/lib/spack/spack/cmd/info.py b/lib/spack/spack/cmd/info.py
index 64d0d20e24..8e7df87a02 100644
--- a/lib/spack/spack/cmd/info.py
+++ b/lib/spack/spack/cmd/info.py
@@ -29,9 +29,11 @@ import spack.fetch_strategy as fs
description = "Get detailed information on a particular package"
+
def padder(str_list, extra=0):
"""Return a function to pad elements of a list."""
length = max(len(str(s)) for s in str_list) + extra
+
def pad(string):
string = str(string)
padding = max(0, length - len(string))
@@ -40,13 +42,17 @@ def padder(str_list, extra=0):
def setup_parser(subparser):
- subparser.add_argument('name', metavar="PACKAGE", help="Name of package to get info for.")
+ subparser.add_argument(
+ 'name', metavar="PACKAGE", help="Name of package to get info for.")
def print_text_info(pkg):
"""Print out a plain text description of a package."""
- print "Package: ", pkg.name
- print "Homepage: ", pkg.homepage
+ header = "{0}: ".format(pkg.build_system_class)
+
+ print header, pkg.name
+ whitespaces = ''.join([' '] * (len(header) - len("Homepage: ")))
+ print "Homepage:", whitespaces, pkg.homepage
print
print "Safe versions: "
@@ -82,17 +88,33 @@ def print_text_info(pkg):
print " " + fmt % (name, default, desc)
print
- print "Dependencies:"
- if pkg.dependencies:
- colify(pkg.dependencies, indent=4)
- else:
- print " None"
+ print "Installation Phases:"
+ phase_str = ''
+ for phase in pkg.phases:
+ phase_str += " {0}".format(phase)
+ print phase_str
+
+ for deptype in ('build', 'link', 'run'):
+ print
+ print "%s Dependencies:" % deptype.capitalize()
+ deps = sorted(pkg.dependencies_of_type(deptype))
+ if deps:
+ colify(deps, indent=4)
+ else:
+ print " None"
print
- print "Virtual packages: "
+ print "Virtual Packages: "
if pkg.provided:
- for spec, when in pkg.provided.items():
- print " %s provides %s" % (when, spec)
+ inverse_map = {}
+ for spec, whens in pkg.provided.items():
+ for when in whens:
+ if when not in inverse_map:
+ inverse_map[when] = set()
+ inverse_map[when].add(spec)
+ for when, specs in reversed(sorted(inverse_map.items())):
+ print " %s provides %s" % (
+ when, ', '.join(str(s) for s in specs))
else:
print " None"
diff --git a/lib/spack/spack/cmd/install.py b/lib/spack/spack/cmd/install.py
index 9d3175786b..3731fe3c81 100644
--- a/lib/spack/spack/cmd/install.py
+++ b/lib/spack/spack/cmd/install.py
@@ -23,18 +23,37 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
+import codecs
+import functools
+import os
+import platform
+import time
+import xml.dom.minidom
+import xml.etree.ElementTree as ET
+import llnl.util.filesystem as fs
import llnl.util.tty as tty
-
import spack
import spack.cmd
+import spack.cmd.common.arguments as arguments
+from spack.build_environment import InstallError
+from spack.fetch_strategy import FetchError
+from spack.package import PackageBase
description = "Build and install packages"
+
def setup_parser(subparser):
subparser.add_argument(
- '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
- help="Do not try to install dependencies of requested packages.")
+ '--only',
+ default='package,dependencies',
+ dest='things_to_install',
+ choices=['package', 'dependencies'],
+ help="""Select the mode of installation.
+The default is to install the package along with all its dependencies.
+Alternatively one can decide to install only the package or only
+the dependencies."""
+ )
subparser.add_argument(
'-j', '--jobs', action='store', type=int,
help="Explicitly set number of make jobs. Default is #cpus.")
@@ -52,13 +71,226 @@ def setup_parser(subparser):
help="Display verbose build output while installing.")
subparser.add_argument(
'--fake', action='store_true', dest='fake',
- help="Fake install. Just remove the prefix and touch a fake file in it.")
+ help="Fake install. Just remove prefix and create a fake file.")
+
+ cd_group = subparser.add_mutually_exclusive_group()
+ arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
+
+ subparser.add_argument(
+ 'package',
+ nargs=argparse.REMAINDER,
+ help="spec of the package to install"
+ )
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to install")
+ '--run-tests', action='store_true', dest='run_tests',
+ help="Run package level tests during installation."
+ )
+ subparser.add_argument(
+ '--log-format',
+ default=None,
+ choices=['junit'],
+ help="Format to be used for log files."
+ )
+ subparser.add_argument(
+ '--log-file',
+ default=None,
+ help="Filename for the log file. If not passed a default will be used."
+ )
+
+
+# Needed for test cases
+class TestResult(object):
+ PASSED = 0
+ FAILED = 1
+ SKIPPED = 2
+ ERRORED = 3
+
+
+class TestSuite(object):
+ def __init__(self, spec):
+ self.root = ET.Element('testsuite')
+ self.tests = []
+ self.spec = spec
+
+ def append(self, item):
+ if not isinstance(item, TestCase):
+ raise TypeError(
+ 'only TestCase instances may be appended to TestSuite'
+ )
+ self.tests.append(item) # Append the item to the list of tests
+
+ def dump(self, filename):
+ # Prepare the header for the entire test suite
+ number_of_errors = sum(
+ x.result_type == TestResult.ERRORED for x in self.tests
+ )
+ self.root.set('errors', str(number_of_errors))
+ number_of_failures = sum(
+ x.result_type == TestResult.FAILED for x in self.tests
+ )
+ self.root.set('failures', str(number_of_failures))
+ self.root.set('tests', str(len(self.tests)))
+ self.root.set('name', self.spec.short_spec)
+ self.root.set('hostname', platform.node())
+
+ for item in self.tests:
+ self.root.append(item.element)
+
+ with codecs.open(filename, 'wb', 'utf-8') as file:
+ xml_string = ET.tostring(self.root)
+ xml_string = xml.dom.minidom.parseString(xml_string).toprettyxml()
+ file.write(xml_string)
+
+
+class TestCase(object):
+
+ results = {
+ TestResult.PASSED: None,
+ TestResult.SKIPPED: 'skipped',
+ TestResult.FAILED: 'failure',
+ TestResult.ERRORED: 'error',
+ }
+ def __init__(self, classname, name):
+ self.element = ET.Element('testcase')
+ self.element.set('classname', str(classname))
+ self.element.set('name', str(name))
+ self.result_type = None
-def install(parser, args):
- if not args.packages:
+ def set_duration(self, duration):
+ self.element.set('time', str(duration))
+
+ def set_result(self, result_type,
+ message=None, error_type=None, text=None):
+ self.result_type = result_type
+ result = TestCase.results[self.result_type]
+ if result is not None and result is not TestResult.PASSED:
+ subelement = ET.SubElement(self.element, result)
+ if error_type is not None:
+ subelement.set('type', error_type)
+ if message is not None:
+ subelement.set('message', str(message))
+ if text is not None:
+ subelement.text = text
+
+
+def fetch_text(path):
+ if not os.path.exists(path):
+ return ''
+
+ with codecs.open(path, 'rb', 'utf-8') as f:
+ return '\n'.join(
+ list(line.strip() for line in f.readlines())
+ )
+
+
+def junit_output(spec, test_suite):
+ # Cycle once and for all on the dependencies and skip
+ # the ones that are already installed. This ensures that
+ # for the same spec, the same number of entries will be
+ # displayed in the XML report
+ for x in spec.traverse(order='post'):
+ package = spack.repo.get(x)
+ if package.installed:
+ test_case = TestCase(package.name, x.short_spec)
+ test_case.set_duration(0.0)
+ test_case.set_result(
+ TestResult.SKIPPED,
+ message='Skipped [already installed]',
+ error_type='already_installed'
+ )
+ test_suite.append(test_case)
+
+ def decorator(func):
+ @functools.wraps(func)
+ def wrapper(self, *args, ** kwargs):
+
+ # Check if the package has been installed already
+ if self.installed:
+ return
+
+ test_case = TestCase(self.name, self.spec.short_spec)
+ # Try to install the package
+ try:
+ # If already installed set the spec as skipped
+ start_time = time.time()
+ # PackageBase.do_install
+ func(self, *args, **kwargs)
+ duration = time.time() - start_time
+ test_case.set_duration(duration)
+ test_case.set_result(TestResult.PASSED)
+ except InstallError:
+ # Check if the package relies on dependencies that
+ # did not install
+ duration = time.time() - start_time
+ test_case.set_duration(duration)
+ if [x for x in self.spec.dependencies(('link', 'run')) if not spack.repo.get(x).installed]: # NOQA: ignore=E501
+ test_case.set_duration(0.0)
+ test_case.set_result(
+ TestResult.SKIPPED,
+ message='Skipped [failed dependencies]',
+ error_type='dep_failed'
+ )
+ else:
+ # An InstallError is considered a failure (the recipe
+ # didn't work correctly)
+ text = fetch_text(self.build_log_path)
+ test_case.set_result(
+ TestResult.FAILED,
+ message='Installation failure',
+ text=text
+ )
+ except FetchError:
+ # A FetchError is considered an error as
+ # we didn't even start building
+ duration = time.time() - start_time
+ test_case.set_duration(duration)
+ text = fetch_text(self.build_log_path)
+ test_case.set_result(
+ TestResult.FAILED,
+ message='Unable to fetch package',
+ text=text
+ )
+ except Exception:
+ # Anything else is also an error
+ duration = time.time() - start_time
+ test_case.set_duration(duration)
+ text = fetch_text(self.build_log_path)
+ test_case.set_result(
+ TestResult.FAILED,
+ message='Unexpected exception thrown during install',
+ text=text
+ )
+ except:
+ # Anything else is also an error
+ duration = time.time() - start_time
+ test_case.set_duration(duration)
+ text = fetch_text(self.build_log_path)
+ test_case.set_result(
+ TestResult.FAILED,
+ message='Unknown error',
+ text=text
+ )
+
+ # Try to get the log
+ test_suite.append(test_case)
+ return wrapper
+ return decorator
+
+
+def default_log_file(spec):
+ """Computes the default filename for the log file and creates
+ the corresponding directory if not present
+ """
+ fmt = 'test-{x.name}-{x.version}-{hash}.xml'
+ basename = fmt.format(x=spec, hash=spec.dag_hash())
+ dirname = fs.join_path(spack.var_path, 'junit-report')
+ fs.mkdirp(dirname)
+ return fs.join_path(dirname, basename)
+
+
+def install(parser, args, **kwargs):
+ if not args.package:
tty.die("install requires at least one package argument")
if args.jobs is not None:
@@ -68,15 +300,51 @@ def install(parser, args):
if args.no_checksum:
spack.do_checksum = False # TODO: remove this global.
- specs = spack.cmd.parse_specs(args.packages, concretize=True)
+ # Parse cli arguments and construct a dictionary
+ # that will be passed to Package.do_install API
+ kwargs.update({
+ 'keep_prefix': args.keep_prefix,
+ 'keep_stage': args.keep_stage,
+ 'install_deps': 'dependencies' in args.things_to_install,
+ 'make_jobs': args.jobs,
+ 'run_tests': args.run_tests,
+ 'verbose': args.verbose,
+ 'fake': args.fake,
+ 'dirty': args.dirty
+ })
+
+ # Spec from cli
+ specs = spack.cmd.parse_specs(args.package, concretize=True)
+ if len(specs) == 0:
+ tty.error('The `spack install` command requires a spec to install.')
+
for spec in specs:
- package = spack.repo.get(spec)
- with spack.installed_db.write_transaction():
- package.do_install(
- keep_prefix=args.keep_prefix,
- keep_stage=args.keep_stage,
- ignore_deps=args.ignore_deps,
- make_jobs=args.jobs,
- verbose=args.verbose,
- fake=args.fake,
- explicit=True)
+ # Check if we were asked to produce some log for dashboards
+ if args.log_format is not None:
+ # Compute the filename for logging
+ log_filename = args.log_file
+ if not log_filename:
+ log_filename = default_log_file(spec)
+ # Create the test suite in which to log results
+ test_suite = TestSuite(spec)
+ # Decorate PackageBase.do_install to get installation status
+ PackageBase.do_install = junit_output(
+ spec, test_suite
+ )(PackageBase.do_install)
+
+ # Do the actual installation
+ if args.things_to_install == 'dependencies':
+ # Install dependencies as-if they were installed
+ # for root (explicit=False in the DB)
+ kwargs['explicit'] = False
+ for s in spec.dependencies():
+ p = spack.repo.get(s)
+ p.do_install(**kwargs)
+ else:
+ package = spack.repo.get(spec)
+ kwargs['explicit'] = True
+ package.do_install(**kwargs)
+
+ # Dump log file if asked to
+ if args.log_format is not None:
+ test_suite.dump(log_filename)
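
The report written by TestSuite.dump() above is plain JUnit-style XML built with ElementTree and pretty-printed through minidom; a standalone sketch with an illustrative spec string::

    import xml.dom.minidom
    import xml.etree.ElementTree as ET

    root = ET.Element('testsuite')
    root.set('name', 'zlib@1.2.8')       # spec.short_spec in the real code
    root.set('tests', '1')
    root.set('failures', '0')
    root.set('errors', '0')

    case = ET.SubElement(root, 'testcase')
    case.set('classname', 'zlib')
    case.set('name', 'zlib@1.2.8%gcc')
    case.set('time', '12.3')

    print(xml.dom.minidom.parseString(ET.tostring(root)).toprettyxml())
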
diff --git a/lib/spack/spack/cmd/list.py b/lib/spack/spack/cmd/list.py
index 1e3699cee0..e1389df69f 100644
--- a/lib/spack/spack/cmd/list.py
+++ b/lib/spack/spack/cmd/list.py
@@ -22,43 +22,161 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import argparse
+import cgi
+import fnmatch
+import re
import sys
+from StringIO import StringIO
+
import llnl.util.tty as tty
-import argparse
+import spack
from llnl.util.tty.colify import colify
-import spack
-import fnmatch
+description = "Print available spack packages to stdout in different formats"
+
+formatters = {}
+
+
+def formatter(func):
+ """Decorator used to register formatters"""
+ formatters[func.__name__] = func
+ return func
-description ="List available spack packages"
def setup_parser(subparser):
subparser.add_argument(
'filter', nargs=argparse.REMAINDER,
- help='Optional glob patterns to filter results.')
+ help='Optional case-insensitive glob patterns to filter results.')
subparser.add_argument(
- '-i', '--insensitive', action='store_true', default=False,
- help='Filtering will be case insensitive.')
+ '-d', '--search-description', action='store_true', default=False,
+ help='Filtering will also search the description for a match.')
+ subparser.add_argument(
+ '--format', default='name_only', choices=formatters,
+ help='Format to be used to print the output [default: name_only]')
-def list(parser, args):
- # Start with all package names.
- pkgs = spack.repo.all_package_names()
+def filter_by_name(pkgs, args):
+ """
+ Filters the sequence of packages according to user prescriptions
- # filter if a filter arg was provided
+ Args:
+ pkgs: sequence of packages
+ args: parsed command line arguments
+
+ Returns:
+ filtered and sorted list of packages
+ """
if args.filter:
- def match(p, f):
- if args.insensitive:
- p = p.lower()
- f = f.lower()
- return fnmatch.fnmatchcase(p, f)
- pkgs = [p for p in pkgs if any(match(p, f) for f in args.filter)]
-
- # sort before displaying.
- sorted_packages = sorted(pkgs, key=lambda s:s.lower())
-
- # Print all the package names in columns
- indent=0
+ res = []
+ for f in args.filter:
+ if '*' not in f and '?' not in f:
+ r = fnmatch.translate('*' + f + '*')
+ else:
+ r = fnmatch.translate(f)
+
+ rc = re.compile(r, flags=re.IGNORECASE)
+ res.append(rc)
+
+ if args.search_description:
+ def match(p, f):
+ if f.match(p):
+ return True
+
+ pkg = spack.repo.get(p)
+ if pkg.__doc__:
+ return f.match(pkg.__doc__)
+ return False
+ else:
+ def match(p, f):
+ return f.match(p)
+ pkgs = [p for p in pkgs if any(match(p, f) for f in res)]
+
+ return sorted(pkgs, key=lambda s: s.lower())
+
+
+@formatter
+def name_only(pkgs):
+ indent = 0
if sys.stdout.isatty():
- tty.msg("%d packages." % len(sorted_packages))
- colify(sorted_packages, indent=indent)
+ tty.msg("%d packages." % len(pkgs))
+ colify(pkgs, indent=indent)
+
+
+@formatter
+def rst(pkgs):
+ """Print out information on all packages in restructured text."""
+
+ def github_url(pkg):
+ """Link to a package file on github."""
+ url = 'https://github.com/LLNL/spack/blob/develop/var/spack/repos/builtin/packages/{0}/package.py'
+ return url.format(pkg.name)
+
+ def rst_table(elts):
+ """Print out a RST-style table."""
+ cols = StringIO()
+ ncol, widths = colify(elts, output=cols, tty=True)
+ header = ' '.join('=' * (w - 1) for w in widths)
+ return '%s\n%s%s' % (header, cols.getvalue(), header)
+
+ pkg_names = pkgs
+ pkgs = [spack.repo.get(name) for name in pkg_names]
+
+ print('.. _package-list:')
+ print('')
+ print('============')
+ print('Package List')
+ print('============')
+ print('')
+ print('This is a list of things you can install using Spack. It is')
+ print('automatically generated based on the packages in the latest Spack')
+ print('release.')
+ print('')
+ print('Spack currently has %d mainline packages:' % len(pkgs))
+ print('')
+ print(rst_table('`%s`_' % p for p in pkg_names))
+ print('')
+
+ # Output some text for each package.
+ for pkg in pkgs:
+ print('-----')
+ print('')
+ print('.. _%s:' % pkg.name)
+ print('')
+ # Must be at least 2 long, breaks for single letter packages like R.
+ print('-' * max(len(pkg.name), 2))
+ print(pkg.name)
+ print('-' * max(len(pkg.name), 2))
+ print('')
+ print('Homepage:')
+ print(' * `%s <%s>`__' % (cgi.escape(pkg.homepage), pkg.homepage))
+ print('')
+ print('Spack package:')
+ print(' * `%s/package.py <%s>`__' % (pkg.name, github_url(pkg)))
+ print('')
+ if pkg.versions:
+ print('Versions:')
+ print(' ' + ', '.join(str(v) for v in
+ reversed(sorted(pkg.versions))))
+ print('')
+
+ for deptype in spack.alldeps:
+ deps = pkg.dependencies_of_type(deptype)
+ if deps:
+ print('%s Dependencies' % deptype.capitalize())
+ print(' ' + ', '.join('%s_' % d if d in pkg_names
+ else d for d in deps))
+ print('')
+
+ print('Description:')
+ print(pkg.format_doc(indent=2))
+ print('')
+
+
+def list(parser, args):
+ # Retrieve the names of all the packages
+ pkgs = set(spack.repo.all_package_names())
+ # Filter the set appropriately
+ sorted_packages = filter_by_name(pkgs, args)
+ # Print to stdout
+ formatters[args.format](sorted_packages)
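
The @formatter decorator above is a plain registry pattern: each decorated function registers itself under its own name so --format can look it up. A standalone sketch, with a made-up second formatter for illustration::

    formatters = {}

    def formatter(func):
        """Register func under its own name for later lookup by --format."""
        formatters[func.__name__] = func
        return func

    @formatter
    def name_only(pkgs):
        print('\n'.join(pkgs))

    @formatter
    def count(pkgs):                    # hypothetical, not part of the diff
        print('%d packages' % len(pkgs))

    formatters['name_only'](['bzip2', 'zlib'])
    formatters['count'](['bzip2', 'zlib'])
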
diff --git a/lib/spack/spack/cmd/load.py b/lib/spack/spack/cmd/load.py
index 54cf01eb43..85190a5d0b 100644
--- a/lib/spack/spack/cmd/load.py
+++ b/lib/spack/spack/cmd/load.py
@@ -25,13 +25,16 @@
import argparse
import spack.modules
-description ="Add package to environment using modules."
+description = "Add package to environment using modules."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to load with modules.')
+ 'spec', nargs=argparse.REMAINDER,
+ help="Spec of package to load with modules. "
+ "(If -, read specs from STDIN)")
def load(parser, args):
diff --git a/lib/spack/spack/cmd/location.py b/lib/spack/spack/cmd/location.py
index b0dbb1a550..54f7185707 100644
--- a/lib/spack/spack/cmd/location.py
+++ b/lib/spack/spack/cmd/location.py
@@ -22,26 +22,26 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
-import sys
import argparse
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
import spack
import spack.cmd
-description="Print out locations of various directories used by Spack"
+description = "Print out locations of various directories used by Spack"
+
def setup_parser(subparser):
global directories
directories = subparser.add_mutually_exclusive_group()
directories.add_argument(
- '-m', '--module-dir', action='store_true', help="Spack python module directory.")
+ '-m', '--module-dir', action='store_true',
+ help="Spack python module directory.")
directories.add_argument(
- '-r', '--spack-root', action='store_true', help="Spack installation root.")
+ '-r', '--spack-root', action='store_true',
+ help="Spack installation root.")
directories.add_argument(
'-i', '--install-dir', action='store_true',
@@ -53,15 +53,19 @@ def setup_parser(subparser):
'-P', '--packages', action='store_true',
help="Top-level packages directory for Spack.")
directories.add_argument(
- '-s', '--stage-dir', action='store_true', help="Stage directory for a spec.")
+ '-s', '--stage-dir', action='store_true',
+ help="Stage directory for a spec.")
directories.add_argument(
- '-S', '--stages', action='store_true', help="Top level Stage directory.")
+ '-S', '--stages', action='store_true',
+ help="Top level Stage directory.")
directories.add_argument(
'-b', '--build-dir', action='store_true',
- help="Checked out or expanded source directory for a spec (requires it to be staged first).")
+ help="Checked out or expanded source directory for a spec "
+ "(requires it to be staged first).")
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help="spec of package to fetch directory for.")
+ 'spec', nargs=argparse.REMAINDER,
+ help="spec of package to fetch directory for.")
def location(parser, args):
@@ -72,7 +76,7 @@ def location(parser, args):
print spack.prefix
elif args.packages:
- print spack.repo.root
+ print spack.repo.first_repo().root
elif args.stages:
print spack.stage_path
@@ -94,7 +98,7 @@ def location(parser, args):
if args.package_dir:
# This one just needs the spec name.
- print join_path(spack.repo.root, spec.name)
+ print spack.repo.dirname_for_package_name(spec.name)
else:
# These versions need concretized specs.
@@ -104,9 +108,9 @@ def location(parser, args):
if args.stage_dir:
print pkg.stage.path
- else: # args.build_dir is the default.
+ else: # args.build_dir is the default.
if not pkg.stage.source_path:
- tty.die("Build directory does not exist yet. Run this to create it:",
+ tty.die("Build directory does not exist yet. "
+ "Run this to create it:",
"spack stage " + " ".join(args.spec))
print pkg.stage.source_path
-
diff --git a/lib/spack/spack/cmd/md5.py b/lib/spack/spack/cmd/md5.py
index 3ba3c71562..2ae279a41e 100644
--- a/lib/spack/spack/cmd/md5.py
+++ b/lib/spack/spack/cmd/md5.py
@@ -25,6 +25,7 @@
import argparse
import hashlib
import os
+from urlparse import urlparse
import llnl.util.tty as tty
import spack.util.crypto
@@ -36,7 +37,7 @@ description = "Calculate md5 checksums for files/urls."
def setup_parser(subparser):
setup_parser.parser = subparser
subparser.add_argument('files', nargs=argparse.REMAINDER,
- help="Files to checksum.")
+ help="Files/urls to checksum.")
def compute_md5_checksum(url):
@@ -49,13 +50,23 @@ def compute_md5_checksum(url):
return value
+def normalized(files):
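+    # Leave arguments that already carry a URL scheme untouched and turn
+    # bare file paths into absolute paths.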
+ for p in files:
+ result = urlparse(p)
+ value = p
+ if not result.scheme:
+ value = os.path.abspath(p)
+ yield value
+
+
def md5(parser, args):
if not args.files:
setup_parser.parser.print_help()
return 1
+ urls = [x for x in normalized(args.files)]
results = []
- for url in args.files:
+ for url in urls:
try:
checksum = compute_md5_checksum(url)
results.append((checksum, url))
@@ -67,6 +78,7 @@ def md5(parser, args):
tty.warn("%s" % e)
# Dump the MD5s at last without interleaving them with downloads
- tty.msg("%d MD5 checksums:" % len(results))
+ checksum = 'checksum' if len(results) == 1 else 'checksums'
+ tty.msg("%d MD5 %s:" % (len(results), checksum))
for checksum, url in results:
- print "%s %s" % (checksum, url)
+ print("{0} {1}".format(checksum, url))
diff --git a/lib/spack/spack/cmd/mirror.py b/lib/spack/spack/cmd/mirror.py
index d5f7abe212..585faaf524 100644
--- a/lib/spack/spack/cmd/mirror.py
+++ b/lib/spack/spack/cmd/mirror.py
@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import sys
from datetime import datetime
import argparse
@@ -40,6 +39,7 @@ from spack.util.spack_yaml import syaml_dict
description = "Manage mirrors."
+
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
@@ -61,8 +61,9 @@ def setup_parser(subparser):
'-D', '--dependencies', action='store_true',
help="Also fetch all dependencies")
create_parser.add_argument(
- '-o', '--one-version-per-spec', action='store_const', const=1, default=0,
- help="Only fetch one 'preferred' version per spec, not all known versions.")
+ '-o', '--one-version-per-spec', action='store_const',
+ const=1, default=0,
+ help="Only fetch one 'preferred' version per spec, not all known.")
scopes = spack.config.config_scopes
@@ -70,7 +71,7 @@ def setup_parser(subparser):
add_parser = sp.add_parser('add', help=mirror_add.__doc__)
add_parser.add_argument('name', help="Mnemonic name for mirror.")
add_parser.add_argument(
- 'url', help="URL of mirror directory created by 'spack mirror create'.")
+ 'url', help="URL of mirror directory from 'spack mirror create'.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
@@ -107,7 +108,7 @@ def mirror_add(args):
tty.die("Mirror with url %s already exists." % url)
# should only be one item per mirror dict.
- items = [(n,u) for n,u in mirrors.items()]
+ items = [(n, u) for n, u in mirrors.items()]
items.insert(0, (args.name, url))
mirrors = syaml_dict(items)
spack.config.update_config('mirrors', mirrors, scope=args.scope)
@@ -121,7 +122,7 @@ def mirror_remove(args):
if not mirrors:
mirrors = syaml_dict()
- if not name in mirrors:
+ if name not in mirrors:
tty.die("No mirror with name %s" % name)
old_value = mirrors.pop(name)
@@ -152,7 +153,7 @@ def _read_specs_from_file(filename):
s.package
specs.append(s)
except SpackError, e:
- tty.die("Parse error in %s, line %d:" % (args.file, i+1),
+ tty.die("Parse error in %s, line %d:" % (args.file, i + 1),
">>> " + string, str(e))
return specs
@@ -179,7 +180,7 @@ def mirror_create(args):
new_specs = set()
for spec in specs:
spec.concretize()
- for s in spec.traverse():
+ for s in spec.traverse(deptype_query=spack.alldeps):
new_specs.add(s)
specs = list(new_specs)
@@ -214,10 +215,10 @@ def mirror_create(args):
def mirror(parser, args):
- action = { 'create' : mirror_create,
- 'add' : mirror_add,
- 'remove' : mirror_remove,
- 'rm' : mirror_remove,
- 'list' : mirror_list }
+ action = {'create': mirror_create,
+ 'add': mirror_add,
+ 'remove': mirror_remove,
+ 'rm': mirror_remove,
+ 'list': mirror_list}
action[args.mirror_command](args)
diff --git a/lib/spack/spack/cmd/module.py b/lib/spack/spack/cmd/module.py
index 5292d42225..b4ee561339 100644
--- a/lib/spack/spack/cmd/module.py
+++ b/lib/spack/spack/cmd/module.py
@@ -22,83 +22,249 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+from __future__ import print_function
+
+import collections
import os
import shutil
import sys
+import llnl.util.filesystem as filesystem
import llnl.util.tty as tty
import spack.cmd
-from llnl.util.filesystem import mkdirp
+import spack.cmd.common.arguments as arguments
from spack.modules import module_types
-from spack.util.string import *
-description = "Manipulate modules and dotkits."
+description = "Manipulate module files"
+
+# Dictionary that will be populated with the list of sub-commands
+# Each sub-command must be callable and accept 3 arguments:
+# - mtype : the type of the module file
+# - specs : the list of specs to be processed
+# - args : namespace containing the parsed command line arguments
+callbacks = {}
+
+
+def subcommand(subparser_name):
+ """Registers a function in the callbacks dictionary"""
+ def decorator(callback):
+ callbacks[subparser_name] = callback
+ return callback
+ return decorator
def setup_parser(subparser):
- sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='module_command')
+ sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='subparser_name')
- sp.add_parser('refresh', help='Regenerate all module files.')
+ # spack module refresh
+ refresh_parser = sp.add_parser('refresh', help='Regenerate module files')
+ refresh_parser.add_argument(
+ '--delete-tree',
+ help='Delete the module file tree before refresh',
+ action='store_true'
+ )
+ arguments.add_common_arguments(
+ refresh_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
- find_parser = sp.add_parser('find', help='Find module files for packages.')
- find_parser.add_argument('module_type',
- help="Type of module to find file for. [" +
- '|'.join(module_types) + "]")
- find_parser.add_argument('spec',
- nargs='+',
- help='spec to find a module file for.')
+ # spack module find
+ find_parser = sp.add_parser('find', help='Find module files for packages')
+ arguments.add_common_arguments(find_parser, ['constraint', 'module_type'])
+ # spack module rm
+ rm_parser = sp.add_parser('rm', help='Remove module files')
+ arguments.add_common_arguments(
+ rm_parser, ['constraint', 'module_type', 'yes_to_all']
+ )
-def module_find(mtype, spec_array):
- """Look at all installed packages and see if the spec provided
- matches any. If it does, check whether there is a module file
- of type <mtype> there, and print out the name that the user
- should type to use that package's module.
- """
- if mtype not in module_types:
- tty.die("Invalid module type: '%s'. Options are %s" %
- (mtype, comma_or(module_types)))
+ # spack module loads
+ loads_parser = sp.add_parser(
+ 'loads',
+        help='Print the list of modules associated with a constraint'
+ )
+ loads_parser.add_argument(
+ '--input-only', action='store_false', dest='shell',
+ help='Generate input for module command (instead of a shell script)'
+ )
+ loads_parser.add_argument(
+ '-p', '--prefix', dest='prefix', default='',
+ help='Prepend to module names when issuing module load commands'
+ )
+ loads_parser.add_argument(
+ '-x', '--exclude', dest='exclude', action='append', default=[],
+ help="Exclude package from output; may be specified multiple times"
+ )
+ arguments.add_common_arguments(
+ loads_parser, ['constraint', 'module_type', 'recurse_dependencies']
+ )
+
+
+class MultipleMatches(Exception):
+ pass
+
+
+class NoMatch(Exception):
+ pass
+
+
+@subcommand('loads')
+def loads(mtype, specs, args):
+    """Print the list of modules associated with a list of specs"""
+ # Get a comprehensive list of specs
+ if args.recurse_dependencies:
+ specs_from_user_constraint = specs[:]
+ specs = []
+ # FIXME : during module file creation nodes seem to be visited
+ # FIXME : multiple times even if cover='nodes' is given. This
+        # FIXME : workaround guarantees a unique list of specs anyway.
+ # FIXME : (same problem as in spack/modules.py)
+ seen = set()
+ seen_add = seen.add
+ for spec in specs_from_user_constraint:
+ specs.extend(
+ [item for item in spec.traverse(order='post', cover='nodes')
+ if not (item in seen or seen_add(item))]
+ )
+
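+    # Pair each spec with the name of its module file, keeping only the
+    # specs whose module file actually exists on disk.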
+ module_cls = module_types[mtype]
+ modules = [(spec, module_cls(spec).use_name)
+ for spec in specs if os.path.exists(module_cls(spec).file_name)]
+
+ module_commands = {
+ 'tcl': 'module load ',
+ 'lmod': 'module load ',
+ 'dotkit': 'dotkit use '
+ }
+
+ d = {
+ 'command': '' if not args.shell else module_commands[mtype],
+ 'prefix': args.prefix
+ }
+
+ exclude_set = set(args.exclude)
+ prompt_template = '{comment}{exclude}{command}{prefix}{name}'
+ for spec, mod in modules:
+ d['exclude'] = '## ' if spec.name in exclude_set else ''
+ d['comment'] = '' if not args.shell else '# {0}\n'.format(
+ spec.format())
+ d['name'] = mod
+ print(prompt_template.format(**d))
- specs = spack.cmd.parse_specs(spec_array)
- if len(specs) > 1:
- tty.die("You can only pass one spec.")
- spec = specs[0]
- specs = spack.installed_db.query(spec)
+@subcommand('find')
+def find(mtype, specs, args):
+ """
+ Look at all installed packages and see if the spec provided
+ matches any. If it does, check whether there is a module file
+ of type <mtype> there, and print out the name that the user
+ should type to use that package's module.
+ """
if len(specs) == 0:
- tty.die("No installed packages match spec %s" % spec)
+ raise NoMatch()
if len(specs) > 1:
- tty.error("Multiple matches for spec %s. Choose one:" % spec)
- for s in specs:
- sys.stderr.write(s.tree(color=True))
- sys.exit(1)
+ raise MultipleMatches()
- mt = module_types[mtype]
- mod = mt(specs[0])
+ spec = specs.pop()
+ mod = module_types[mtype](spec)
if not os.path.isfile(mod.file_name):
tty.die("No %s module is installed for %s" % (mtype, spec))
-
print(mod.use_name)
-def module_refresh():
- """Regenerate all module files for installed packages known to
- spack (some packages may no longer exist)."""
- specs = [s for s in spack.installed_db.query(installed=True, known=True)]
+@subcommand('rm')
+def rm(mtype, specs, args):
+ """Deletes module files associated with items in specs"""
+ module_cls = module_types[mtype]
+ specs_with_modules = [
+ spec for spec in specs if os.path.exists(module_cls(spec).file_name)]
+ modules = [module_cls(spec) for spec in specs_with_modules]
- for name, cls in module_types.items():
- tty.msg("Regenerating %s module files." % name)
- if os.path.isdir(cls.path):
- shutil.rmtree(cls.path, ignore_errors=False)
- mkdirp(cls.path)
- for spec in specs:
- cls(spec).write()
+ if not modules:
+ tty.msg('No module file matches your query')
+ raise SystemExit(1)
+ # Ask for confirmation
+ if not args.yes_to_all:
+ tty.msg(
+            'You are about to remove {0} module files for the following specs:\n'
+ .format(mtype))
+ spack.cmd.display_specs(specs_with_modules, long=True)
+ print('')
+        spack.cmd.ask_for_confirmation('Do you want to proceed? ')
-def module(parser, args):
- if args.module_command == 'refresh':
- module_refresh()
+ # Remove the module files
+ for s in modules:
+ s.remove()
+
+
+@subcommand('refresh')
+def refresh(mtype, specs, args):
+    """Regenerate module files for each item in specs"""
+ # Prompt a message to the user about what is going to change
+ if not specs:
+ tty.msg('No package matches your query')
+ return
+
+ if not args.yes_to_all:
+ tty.msg(
+ 'You are about to regenerate {name} module files for:\n'
+ .format(name=mtype))
+ spack.cmd.display_specs(specs, long=True)
+ print('')
+        spack.cmd.ask_for_confirmation('Do you want to proceed? ')
- elif args.module_command == 'find':
- module_find(args.module_type, args.spec)
+ cls = module_types[mtype]
+
+ # Detect name clashes
+ writers = [cls(spec) for spec in specs
+ if spack.repo.exists(spec.name)] # skip unknown packages.
+ file2writer = collections.defaultdict(list)
+ for item in writers:
+ file2writer[item.file_name].append(item)
+
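+    # If any module file name is claimed by more than one spec, the writers
+    # would overwrite each other; report the clash and abort.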
+ if len(file2writer) != len(writers):
+ message = 'Name clashes detected in module files:\n'
+ for filename, writer_list in file2writer.items():
+ if len(writer_list) > 1:
+ message += '\nfile : {0}\n'.format(filename)
+ for x in writer_list:
+ message += 'spec : {0}\n'.format(x.spec.format(color=True))
+ tty.error(message)
+ tty.error('Operation aborted')
+ raise SystemExit(1)
+
+ # Proceed regenerating module files
+ tty.msg('Regenerating {name} module files'.format(name=mtype))
+ if os.path.isdir(cls.path) and args.delete_tree:
+ shutil.rmtree(cls.path, ignore_errors=False)
+ filesystem.mkdirp(cls.path)
+ for x in writers:
+ x.write(overwrite=True)
+
+
+def module(parser, args):
+ # Qualifiers to be used when querying the db for specs
+ constraint_qualifiers = {
+ 'refresh': {
+ 'installed': True,
+ 'known': True
+ },
+ }
+ query_args = constraint_qualifiers.get(args.subparser_name, {})
+ specs = args.specs(**query_args)
+ module_type = args.module_type
+ constraint = args.constraint
+ try:
+ callbacks[args.subparser_name](module_type, specs, args)
+ except MultipleMatches:
+ message = ('the constraint \'{query}\' matches multiple packages, '
+ 'and this is not allowed in this context')
+ tty.error(message.format(query=constraint))
+ for s in specs:
+ sys.stderr.write(s.format(color=True) + '\n')
+ raise SystemExit(1)
+ except NoMatch:
+        message = ('the constraint \'{query}\' matches no package, '
+ 'and this is not allowed in this context')
+ tty.die(message.format(query=constraint))
diff --git a/lib/spack/spack/cmd/package-list.py b/lib/spack/spack/cmd/package-list.py
deleted file mode 100644
index 6c5c4ae8c6..0000000000
--- a/lib/spack/spack/cmd/package-list.py
+++ /dev/null
@@ -1,95 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import re
-import cgi
-from StringIO import StringIO
-import llnl.util.tty as tty
-from llnl.util.tty.colify import *
-import spack
-
-description = "Print a list of all packages in reStructuredText."
-
-
-def github_url(pkg):
- """Link to a package file on github."""
- return ("https://github.com/llnl/spack/blob/master/var/spack/packages/%s/package.py" %
- pkg.name)
-
-
-def rst_table(elts):
- """Print out a RST-style table."""
- cols = StringIO()
- ncol, widths = colify(elts, output=cols, tty=True)
- header = " ".join("=" * (w-1) for w in widths)
- return "%s\n%s%s" % (header, cols.getvalue(), header)
-
-
-def print_rst_package_list():
- """Print out information on all packages in restructured text."""
- pkgs = sorted(spack.repo.all_packages(), key=lambda s:s.name.lower())
-
- print ".. _package-list:"
- print
- print "Package List"
- print "=================="
-
- print "This is a list of things you can install using Spack. It is"
- print "automatically generated based on the packages in the latest Spack"
- print "release."
- print
-
- print "Spack currently has %d mainline packages:" % len(pkgs)
- print
- print rst_table("`%s`_" % p.name for p in pkgs)
- print
- print "-----"
-
- # Output some text for each package.
- for pkg in pkgs:
- print
- print ".. _%s:" % pkg.name
- print
- print pkg.name
- print "-" * len(pkg.name)
- print "Links:"
- print " * `%s <%s>`__" % (cgi.escape(pkg.homepage), pkg.homepage)
- print " * `%s/package.py <%s>`__" % (pkg.name, github_url(pkg))
- print
- if pkg.versions:
- print "Versions:"
- print " " + ", ".join(str(v) for v in reversed(sorted(pkg.versions)))
- if pkg.dependencies:
- print "Dependencies"
- print " " + ", ".join("`%s`_" % d if d != "mpi" else d
- for d in pkg.dependencies)
- print
- print "Description:"
- print pkg.format_doc(indent=2)
- print
- print "-----"
-
-
-def package_list(parser, args):
- print_rst_package_list()
diff --git a/lib/spack/spack/cmd/patch.py b/lib/spack/spack/cmd/patch.py
index a5507e42cf..9c72da40b5 100644
--- a/lib/spack/spack/cmd/patch.py
+++ b/lib/spack/spack/cmd/patch.py
@@ -29,14 +29,16 @@ import spack.cmd
import spack
-description="Patch expanded archive sources in preparation for install"
+description = "Patch expanded archive sources in preparation for install"
+
def setup_parser(subparser):
subparser.add_argument(
'-n', '--no-checksum', action='store_true', dest='no_checksum',
help="Do not check downloaded packages against checksum")
subparser.add_argument(
- 'packages', nargs=argparse.REMAINDER, help="specs of packages to stage")
+ 'packages', nargs=argparse.REMAINDER,
+ help="specs of packages to stage")
def patch(parser, args):
diff --git a/lib/spack/spack/cmd/pkg.py b/lib/spack/spack/cmd/pkg.py
index a24c2759fe..7791b93cf5 100644
--- a/lib/spack/spack/cmd/pkg.py
+++ b/lib/spack/spack/cmd/pkg.py
@@ -33,6 +33,7 @@ from spack.util.executable import *
description = "Query packages associated with particular git revisions."
+
def setup_parser(subparser):
sp = subparser.add_subparsers(
metavar='SUBCOMMAND', dest='pkg_command')
@@ -46,22 +47,28 @@ def setup_parser(subparser):
help="Revision to list packages for.")
diff_parser = sp.add_parser('diff', help=pkg_diff.__doc__)
- diff_parser.add_argument('rev1', nargs='?', default='HEAD^',
- help="Revision to compare against.")
- diff_parser.add_argument('rev2', nargs='?', default='HEAD',
- help="Revision to compare to rev1 (default is HEAD).")
+ diff_parser.add_argument(
+ 'rev1', nargs='?', default='HEAD^',
+ help="Revision to compare against.")
+ diff_parser.add_argument(
+ 'rev2', nargs='?', default='HEAD',
+ help="Revision to compare to rev1 (default is HEAD).")
add_parser = sp.add_parser('added', help=pkg_added.__doc__)
- add_parser.add_argument('rev1', nargs='?', default='HEAD^',
- help="Revision to compare against.")
- add_parser.add_argument('rev2', nargs='?', default='HEAD',
- help="Revision to compare to rev1 (default is HEAD).")
+ add_parser.add_argument(
+ 'rev1', nargs='?', default='HEAD^',
+ help="Revision to compare against.")
+ add_parser.add_argument(
+ 'rev2', nargs='?', default='HEAD',
+ help="Revision to compare to rev1 (default is HEAD).")
rm_parser = sp.add_parser('removed', help=pkg_removed.__doc__)
- rm_parser.add_argument('rev1', nargs='?', default='HEAD^',
- help="Revision to compare against.")
- rm_parser.add_argument('rev2', nargs='?', default='HEAD',
- help="Revision to compare to rev1 (default is HEAD).")
+ rm_parser.add_argument(
+ 'rev1', nargs='?', default='HEAD^',
+ help="Revision to compare against.")
+ rm_parser.add_argument(
+ 'rev2', nargs='?', default='HEAD',
+ help="Revision to compare to rev1 (default is HEAD).")
def get_git():
@@ -88,7 +95,8 @@ def pkg_add(args):
for pkg_name in args.packages:
filename = spack.repo.filename_for_package_name(pkg_name)
if not os.path.isfile(filename):
- tty.die("No such package: %s. Path does not exist:" % pkg_name, filename)
+ tty.die("No such package: %s. Path does not exist:" %
+ pkg_name, filename)
git = get_git()
git('-C', spack.packages_path, 'add', filename)
@@ -112,7 +120,8 @@ def pkg_diff(args):
if u1:
print "%s:" % args.rev1
colify(sorted(u1), indent=4)
- if u1: print
+ if u1:
+ print
if u2:
print "%s:" % args.rev2
@@ -122,19 +131,21 @@ def pkg_diff(args):
def pkg_removed(args):
"""Show packages removed since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
- if u1: colify(sorted(u1))
+ if u1:
+ colify(sorted(u1))
def pkg_added(args):
"""Show packages added since a commit."""
u1, u2 = diff_packages(args.rev1, args.rev2)
- if u2: colify(sorted(u2))
+ if u2:
+ colify(sorted(u2))
def pkg(parser, args):
- action = { 'add' : pkg_add,
- 'diff' : pkg_diff,
- 'list' : pkg_list,
- 'removed' : pkg_removed,
- 'added' : pkg_added }
+ action = {'add': pkg_add,
+ 'diff': pkg_diff,
+ 'list': pkg_list,
+ 'removed': pkg_removed,
+ 'added': pkg_added}
action[args.pkg_command](args)
diff --git a/lib/spack/spack/cmd/providers.py b/lib/spack/spack/cmd/providers.py
index e9007486d2..0f4a97cc4a 100644
--- a/lib/spack/spack/cmd/providers.py
+++ b/lib/spack/spack/cmd/providers.py
@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
import argparse
from llnl.util.tty.colify import colify
@@ -30,11 +29,13 @@ from llnl.util.tty.colify import colify
import spack
import spack.cmd
-description ="List packages that provide a particular virtual package"
+description = "List packages that provide a particular virtual package"
+
def setup_parser(subparser):
- subparser.add_argument('vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
- help='Find packages that provide this virtual package')
+ subparser.add_argument(
+ 'vpkg_spec', metavar='VPACKAGE_SPEC', nargs=argparse.REMAINDER,
+ help='Find packages that provide this virtual package')
def providers(parser, args):
diff --git a/lib/spack/spack/cmd/purge.py b/lib/spack/spack/cmd/purge.py
index 7b33ef7f69..66cfc2af29 100644
--- a/lib/spack/spack/cmd/purge.py
+++ b/lib/spack/spack/cmd/purge.py
@@ -22,9 +22,37 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import spack
import spack.stage as stage
-description = "Remove all temporary build files and downloaded archives"
+description = "Remove temporary build files and/or downloaded archives"
+
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-s', '--stage', action='store_true', default=True,
+ help="Remove all temporary build stages (default).")
+ subparser.add_argument(
+ '-d', '--downloads', action='store_true',
+ help="Remove cached downloads.")
+ subparser.add_argument(
+ '-m', '--misc-cache', action='store_true',
+ help="Remove long-lived caches, like the virtual package index.")
+ subparser.add_argument(
+ '-a', '--all', action='store_true',
+ help="Remove all of the above.")
+
def purge(parser, args):
- stage.purge()
+ # Special case: no flags.
+ if not any((args.stage, args.downloads, args.misc_cache, args.all)):
+ stage.purge()
+ return
+
+ # handle other flags with fall through.
+ if args.stage or args.all:
+ stage.purge()
+ if args.downloads or args.all:
+ spack.fetch_cache.destroy()
+ if args.misc_cache or args.all:
+ spack.misc_cache.destroy()
diff --git a/lib/spack/spack/cmd/python.py b/lib/spack/spack/cmd/python.py
index 59423271b9..12727cb599 100644
--- a/lib/spack/spack/cmd/python.py
+++ b/lib/spack/spack/cmd/python.py
@@ -30,18 +30,22 @@ import platform
import spack
+
def setup_parser(subparser):
subparser.add_argument(
'-c', dest='python_command', help='Command to execute.')
subparser.add_argument(
- 'python_args', nargs=argparse.REMAINDER, help="File to run plus arguments.")
+ 'python_args', nargs=argparse.REMAINDER,
+ help="File to run plus arguments.")
+
description = "Launch an interpreter as spack would launch a command"
+
def python(parser, args):
# Fake a main python shell by setting __name__ to __main__.
- console = code.InteractiveConsole({'__name__' : '__main__',
- 'spack' : spack})
+ console = code.InteractiveConsole({'__name__': '__main__',
+ 'spack': spack})
if "PYTHONSTARTUP" in os.environ:
startup_file = os.environ["PYTHONSTARTUP"]
diff --git a/lib/spack/spack/cmd/reindex.py b/lib/spack/spack/cmd/reindex.py
index 93eba7a0f1..7dddda2ffb 100644
--- a/lib/spack/spack/cmd/reindex.py
+++ b/lib/spack/spack/cmd/reindex.py
@@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import argparse
import spack
-
+import spack.store
description = "Rebuild Spack's package database."
+
def reindex(parser, args):
- spack.installed_db.reindex(spack.install_layout)
+ spack.store.db.reindex(spack.store.layout)
diff --git a/lib/spack/spack/cmd/repo.py b/lib/spack/spack/cmd/repo.py
index 399237b169..79df63ce8d 100644
--- a/lib/spack/spack/cmd/repo.py
+++ b/lib/spack/spack/cmd/repo.py
@@ -23,20 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import re
-import shutil
-from external import argparse
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path, mkdirp
import spack.spec
import spack.config
-from spack.util.environment import get_path
from spack.repository import *
description = "Manage package source repositories."
+
def setup_parser(subparser):
sp = subparser.add_subparsers(metavar='SUBCOMMAND', dest='repo_command')
scopes = spack.config.config_scopes
@@ -57,13 +53,15 @@ def setup_parser(subparser):
# Add
add_parser = sp.add_parser('add', help=repo_add.__doc__)
- add_parser.add_argument('path', help="Path to a Spack package repository directory.")
+ add_parser.add_argument(
+ 'path', help="Path to a Spack package repository directory.")
add_parser.add_argument(
'--scope', choices=scopes, default=spack.cmd.default_modify_scope,
help="Configuration scope to modify.")
# Remove
- remove_parser = sp.add_parser('remove', help=repo_remove.__doc__, aliases=['rm'])
+ remove_parser = sp.add_parser(
+ 'remove', help=repo_remove.__doc__, aliases=['rm'])
remove_parser.add_argument(
'path_or_namespace',
help="Path or namespace of a Spack package repository.")
@@ -100,14 +98,15 @@ def repo_add(args):
# If that succeeds, finally add it to the configuration.
repos = spack.config.get_config('repos', args.scope)
- if not repos: repos = []
+ if not repos:
+ repos = []
if repo.root in repos or path in repos:
tty.die("Repository is already registered with Spack: %s" % path)
repos.insert(0, canon_path)
spack.config.update_config('repos', repos, args.scope)
- tty.msg("Created repo with namespace '%s'." % repo.namespace)
+ tty.msg("Added repo with namespace '%s'." % repo.namespace)
def repo_remove(args):
@@ -135,7 +134,7 @@ def repo_remove(args):
tty.msg("Removed repository %s with namespace '%s'."
% (repo.root, repo.namespace))
return
- except RepoError as e:
+ except RepoError:
continue
tty.die("No repository with path or namespace: %s"
@@ -149,7 +148,7 @@ def repo_list(args):
for r in roots:
try:
repos.append(Repo(r))
- except RepoError as e:
+ except RepoError:
continue
msg = "%d package repositor" % len(repos)
@@ -166,9 +165,9 @@ def repo_list(args):
def repo(parser, args):
- action = { 'create' : repo_create,
- 'list' : repo_list,
- 'add' : repo_add,
- 'remove' : repo_remove,
- 'rm' : repo_remove}
+ action = {'create': repo_create,
+ 'list': repo_list,
+ 'add': repo_add,
+ 'remove': repo_remove,
+ 'rm': repo_remove}
action[args.repo_command](args)
diff --git a/lib/spack/spack/cmd/restage.py b/lib/spack/spack/cmd/restage.py
index 325d30662f..969afe09bd 100644
--- a/lib/spack/spack/cmd/restage.py
+++ b/lib/spack/spack/cmd/restage.py
@@ -31,6 +31,7 @@ import spack.cmd
description = "Revert checked out package source code."
+
def setup_parser(subparser):
subparser.add_argument('packages', nargs=argparse.REMAINDER,
help="specs of packages to restage")
diff --git a/lib/spack/spack/cmd/setup.py b/lib/spack/spack/cmd/setup.py
new file mode 100644
index 0000000000..5d8aaefa72
--- /dev/null
+++ b/lib/spack/spack/cmd/setup.py
@@ -0,0 +1,182 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+import copy
+import os
+import string
+import sys
+
+import llnl.util.tty as tty
+import spack
+import spack.store
+import spack.cmd
+import spack.cmd.install as install
+import spack.cmd.common.arguments as arguments
+from llnl.util.filesystem import set_executable
+from spack import which
+from spack.stage import DIYStage
+
+description = "Create a configuration script and module, but don't build."
+
+
+def setup_parser(subparser):
+ subparser.add_argument(
+ '-i', '--ignore-dependencies', action='store_true', dest='ignore_deps',
+ help="Do not try to install dependencies of requested packages.")
+ subparser.add_argument(
+ '-v', '--verbose', action='store_true', dest='verbose',
+ help="Display verbose build output while installing.")
+ subparser.add_argument(
+ 'spec', nargs=argparse.REMAINDER,
+ help="specs to use for install. Must contain package AND version.")
+
+ cd_group = subparser.add_mutually_exclusive_group()
+ arguments.add_common_arguments(cd_group, ['clean', 'dirty'])
+
+
+def spack_transitive_include_path():
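+    # Join the include/ directory of every prefix listed in
+    # SPACK_DEPENDENCIES, using ';' (the list separator CMake expects).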
+ return ';'.join(
+ os.path.join(dep, 'include')
+ for dep in os.environ['SPACK_DEPENDENCIES'].split(os.pathsep)
+ )
+
+
+def write_spconfig(package):
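+    # Emit an executable spconfig.py script that reruns cmake with the
+    # package's arguments and the environment Spack would have used.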
+ # Set-up the environment
+ spack.build_environment.setup_package(package)
+
+ cmd = [str(which('cmake'))] + package.std_cmake_args + package.cmake_args()
+
+ env = dict()
+
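+    # Drop Spack's compiler-wrapper directories from PATH so the generated
+    # script invokes the real compilers set in CC/CXX/FC below.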
+ paths = os.environ['PATH'].split(':')
+ paths = [item for item in paths if 'spack/env' not in item]
+ env['PATH'] = ':'.join(paths)
+ env['SPACK_TRANSITIVE_INCLUDE_PATH'] = spack_transitive_include_path()
+ env['CMAKE_PREFIX_PATH'] = os.environ['CMAKE_PREFIX_PATH']
+ env['CC'] = os.environ['SPACK_CC']
+ env['CXX'] = os.environ['SPACK_CXX']
+ env['FC'] = os.environ['SPACK_FC']
+
+ setup_fname = 'spconfig.py'
+ with open(setup_fname, 'w') as fout:
+ fout.write(
+ r"""#!%s
+#
+
+import sys
+import os
+import subprocess
+
+def cmdlist(str):
+ return list(x.strip().replace("'",'') for x in str.split('\n') if x)
+env = dict(os.environ)
+""" % sys.executable)
+
+ env_vars = sorted(list(env.keys()))
+ for name in env_vars:
+ val = env[name]
+ if string.find(name, 'PATH') < 0:
+ fout.write('env[%s] = %s\n' % (repr(name), repr(val)))
+ else:
+ if name == 'SPACK_TRANSITIVE_INCLUDE_PATH':
+ sep = ';'
+ else:
+ sep = ':'
+
+ fout.write(
+ 'env[%s] = "%s".join(cmdlist("""\n' % (repr(name), sep))
+ for part in string.split(val, sep):
+ fout.write(' %s\n' % part)
+ fout.write('"""))\n')
+
+ fout.write('\ncmd = cmdlist("""\n')
+ fout.write('%s\n' % cmd[0])
+ for arg in cmd[1:]:
+ fout.write(' %s\n' % arg)
+ fout.write('""") + sys.argv[1:]\n')
+ fout.write('\nproc = subprocess.Popen(cmd, env=env)\nproc.wait()\n')
+ set_executable(setup_fname)
+
+
+def setup(self, args):
+ if not args.spec:
+ tty.die("spack setup requires a package spec argument.")
+
+ specs = spack.cmd.parse_specs(args.spec)
+ if len(specs) > 1:
+ tty.die("spack setup only takes one spec.")
+
+ # Take a write lock before checking for existence.
+ with spack.store.db.write_transaction():
+ spec = specs[0]
+ if not spack.repo.exists(spec.name):
+ tty.die("No package for '{0}' was found.".format(spec.name),
+ " Use `spack create` to create a new package")
+ if not spec.versions.concrete:
+ tty.die(
+ "spack setup spec must have a single, concrete version. "
+ "Did you forget a package version number?")
+
+ spec.concretize()
+ package = spack.repo.get(spec)
+ if not isinstance(package, spack.CMakePackage):
+ tty.die(
+ 'Support for {0} derived packages not yet implemented'.format(
+ package.build_system_class
+ )
+ )
+
+ # It's OK if the package is already installed.
+
+ # Forces the build to run out of the current directory.
+ package.stage = DIYStage(os.getcwd())
+
+ # TODO: make this an argument, not a global.
+ spack.do_checksum = False
+
+ # Install dependencies if requested to do so
+ if not args.ignore_deps:
+ parser = argparse.ArgumentParser()
+ install.setup_parser(parser)
+ inst_args = copy.deepcopy(args)
+ inst_args = parser.parse_args(
+ ['--only=dependencies'] + args.spec,
+ namespace=inst_args
+ )
+ install.install(parser, inst_args)
+ # Generate spconfig.py
+ tty.msg(
+ 'Generating spconfig.py [{0}]'.format(package.spec.cshort_spec)
+ )
+ write_spconfig(package)
+ # Install this package to register it in the DB and permit
+ # module file regeneration
+ inst_args = copy.deepcopy(args)
+ inst_args = parser.parse_args(
+ ['--only=package', '--fake'] + args.spec,
+ namespace=inst_args
+ )
+ install.install(parser, inst_args)
diff --git a/lib/spack/spack/cmd/spec.py b/lib/spack/spack/cmd/spec.py
index 321e3e429b..4ecd4d6e54 100644
--- a/lib/spack/spack/cmd/spec.py
+++ b/lib/spack/spack/cmd/spec.py
@@ -23,37 +23,66 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import argparse
-import spack.cmd
-
-import llnl.util.tty as tty
import spack
-import spack.url as url
+import spack.cmd
+import spack.cmd.common.arguments as arguments
description = "print out abstract and concrete versions of a spec."
+
def setup_parser(subparser):
- subparser.add_argument('-i', '--ids', action='store_true',
- help="show numerical ids for dependencies.")
- subparser.add_argument('specs', nargs=argparse.REMAINDER, help="specs of packages")
+ arguments.add_common_arguments(subparser, ['long', 'very_long'])
+ subparser.add_argument(
+ '-y', '--yaml', action='store_true', default=False,
+ help='Print concrete spec as YAML.')
+ subparser.add_argument(
+ '-c', '--cover', action='store',
+ default='nodes', choices=['nodes', 'edges', 'paths'],
+ help='How extensively to traverse the DAG. (default: nodes).')
+ subparser.add_argument(
+ '-N', '--namespaces', action='store_true', default=False,
+ help='Show fully qualified package names.')
+ subparser.add_argument(
+ '-I', '--install-status', action='store_true', default=False,
+ help='Show install status of packages. Packages can be: '
+ 'installed [+], missing and needed by an installed package [-], '
+ 'or not installed (no annotation).')
+ subparser.add_argument(
+ '-t', '--types', action='store_true', default=False,
+ help='Show dependency types.')
+ subparser.add_argument(
+ 'specs', nargs=argparse.REMAINDER, help="specs of packages")
def spec(parser, args):
- kwargs = { 'ids' : args.ids,
- 'indent' : 2,
- 'color' : True }
+ name_fmt = '$.' if args.namespaces else '$_'
+ kwargs = {'color': True,
+ 'cover': args.cover,
+ 'format': name_fmt + '$@$%@+$+$=',
+ 'hashes': args.long or args.very_long,
+ 'hashlen': None if args.very_long else 7,
+ 'show_types': args.types,
+ 'install_status': args.install_status}
for spec in spack.cmd.parse_specs(args.specs):
+ # With -y, just print YAML to output.
+ if args.yaml:
+ spec.concretize()
+ print spec.to_yaml()
+ continue
+
+ # Print some diagnostic info by default.
print "Input spec"
- print "------------------------------"
+ print "--------------------------------"
print spec.tree(**kwargs)
print "Normalized"
- print "------------------------------"
+ print "--------------------------------"
spec.normalize()
print spec.tree(**kwargs)
print "Concretized"
- print "------------------------------"
+ print "--------------------------------"
spec.concretize()
print spec.tree(**kwargs)
diff --git a/lib/spack/spack/cmd/stage.py b/lib/spack/spack/cmd/stage.py
index 61e9c6d9ff..bfc2e5f456 100644
--- a/lib/spack/spack/cmd/stage.py
+++ b/lib/spack/spack/cmd/stage.py
@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
-description="Expand downloaded archive in preparation for install"
+description = "Expand downloaded archive in preparation for install"
+
def setup_parser(subparser):
subparser.add_argument(
diff --git a/lib/spack/spack/cmd/test-install.py b/lib/spack/spack/cmd/test-install.py
deleted file mode 100644
index 45592a7dda..0000000000
--- a/lib/spack/spack/cmd/test-install.py
+++ /dev/null
@@ -1,225 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import argparse
-import codecs
-import os
-import time
-import xml.dom.minidom
-import xml.etree.ElementTree as ET
-
-import llnl.util.tty as tty
-import spack
-import spack.cmd
-from llnl.util.filesystem import *
-from spack.build_environment import InstallError
-from spack.fetch_strategy import FetchError
-
-description = "Run package installation as a unit test, output formatted results."
-
-
-def setup_parser(subparser):
- subparser.add_argument('-j',
- '--jobs',
- action='store',
- type=int,
- help="Explicitly set number of make jobs. Default is #cpus.")
-
- subparser.add_argument('-n',
- '--no-checksum',
- action='store_true',
- dest='no_checksum',
- help="Do not check packages against checksum")
-
- subparser.add_argument('-o', '--output', action='store', help="test output goes in this file")
-
- subparser.add_argument('package', nargs=argparse.REMAINDER, help="spec of package to install")
-
-
-class TestResult(object):
- PASSED = 0
- FAILED = 1
- SKIPPED = 2
- ERRORED = 3
-
-
-class TestSuite(object):
- def __init__(self, filename):
- self.filename = filename
- self.root = ET.Element('testsuite')
- self.tests = []
-
- def __enter__(self):
- return self
-
- def append(self, item):
- if not isinstance(item, TestCase):
- raise TypeError('only TestCase instances may be appended to a TestSuite instance')
- self.tests.append(item) # Append the item to the list of tests
-
- def __exit__(self, exc_type, exc_val, exc_tb):
- # Prepare the header for the entire test suite
- number_of_errors = sum(x.result_type == TestResult.ERRORED for x in self.tests)
- self.root.set('errors', str(number_of_errors))
- number_of_failures = sum(x.result_type == TestResult.FAILED for x in self.tests)
- self.root.set('failures', str(number_of_failures))
- self.root.set('tests', str(len(self.tests)))
-
- for item in self.tests:
- self.root.append(item.element)
-
- with open(self.filename, 'wb') as file:
- xml_string = ET.tostring(self.root)
- xml_string = xml.dom.minidom.parseString(xml_string).toprettyxml()
- file.write(xml_string)
-
-
-class TestCase(object):
-
- results = {
- TestResult.PASSED: None,
- TestResult.SKIPPED: 'skipped',
- TestResult.FAILED: 'failure',
- TestResult.ERRORED: 'error',
- }
-
- def __init__(self, classname, name, time=None):
- self.element = ET.Element('testcase')
- self.element.set('classname', str(classname))
- self.element.set('name', str(name))
- if time is not None:
- self.element.set('time', str(time))
- self.result_type = None
-
- def set_result(self, result_type, message=None, error_type=None, text=None):
- self.result_type = result_type
- result = TestCase.results[self.result_type]
- if result is not None and result is not TestResult.PASSED:
- subelement = ET.SubElement(self.element, result)
- if error_type is not None:
- subelement.set('type', error_type)
- if message is not None:
- subelement.set('message', str(message))
- if text is not None:
- subelement.text = text
-
-
-def fetch_log(path):
- if not os.path.exists(path):
- return list()
- with codecs.open(path, 'rb', 'utf-8') as F:
- return list(line.strip() for line in F.readlines())
-
-
-def failed_dependencies(spec):
- return set(item for item in spec.dependencies.itervalues() if not spack.repo.get(item).installed)
-
-
-def get_top_spec_or_die(args):
- specs = spack.cmd.parse_specs(args.package, concretize=True)
- if len(specs) > 1:
- tty.die("Only 1 top-level package can be specified")
- top_spec = iter(specs).next()
- return top_spec
-
-
-def install_single_spec(spec, number_of_jobs):
- package = spack.repo.get(spec)
-
- # If it is already installed, skip the test
- if spack.repo.get(spec).installed:
- testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
- testcase.set_result(TestResult.SKIPPED, message='Skipped [already installed]', error_type='already_installed')
- return testcase
-
- # If it relies on dependencies that did not install, skip
- if failed_dependencies(spec):
- testcase = TestCase(package.name, package.spec.short_spec, time=0.0)
- testcase.set_result(TestResult.SKIPPED, message='Skipped [failed dependencies]', error_type='dep_failed')
- return testcase
-
- # Otherwise try to install the spec
- try:
- start_time = time.time()
- package.do_install(keep_prefix=False,
- keep_stage=True,
- ignore_deps=False,
- make_jobs=number_of_jobs,
- verbose=True,
- fake=False)
- duration = time.time() - start_time
- testcase = TestCase(package.name, package.spec.short_spec, duration)
- testcase.set_result(TestResult.PASSED)
- except InstallError:
- # An InstallError is considered a failure (the recipe didn't work correctly)
- duration = time.time() - start_time
- # Try to get the log
- lines = fetch_log(package.build_log_path)
- text = '\n'.join(lines)
- testcase = TestCase(package.name, package.spec.short_spec, duration)
- testcase.set_result(TestResult.FAILED, message='Installation failure', text=text)
-
- except FetchError:
- # A FetchError is considered an error (we didn't even start building)
- duration = time.time() - start_time
- testcase = TestCase(package.name, package.spec.short_spec, duration)
- testcase.set_result(TestResult.ERRORED, message='Unable to fetch package')
-
- return testcase
-
-
-def get_filename(args, top_spec):
- if not args.output:
- fname = 'test-{x.name}-{x.version}-{hash}.xml'.format(x=top_spec, hash=top_spec.dag_hash())
- output_directory = join_path(os.getcwd(), 'test-output')
- if not os.path.exists(output_directory):
- os.mkdir(output_directory)
- output_filename = join_path(output_directory, fname)
- else:
- output_filename = args.output
- return output_filename
-
-
-def test_install(parser, args):
- # Check the input
- if not args.package:
- tty.die("install requires a package argument")
-
- if args.jobs is not None:
- if args.jobs <= 0:
- tty.die("The -j option must be a positive integer!")
-
- if args.no_checksum:
- spack.do_checksum = False # TODO: remove this global.
-
- # Get the one and only top spec
- top_spec = get_top_spec_or_die(args)
- # Get the filename of the test
- output_filename = get_filename(args, top_spec)
- # TEST SUITE
- with TestSuite(output_filename) as test_suite:
- # Traverse in post order : each spec is a test case
- for spec in top_spec.traverse(order='post'):
- test_case = install_single_spec(spec, args.jobs)
- test_suite.append(test_case)
diff --git a/lib/spack/spack/cmd/test.py b/lib/spack/spack/cmd/test.py
index cb9dd26c71..9d92037bb6 100644
--- a/lib/spack/spack/cmd/test.py
+++ b/lib/spack/spack/cmd/test.py
@@ -22,48 +22,87 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import sys
import os
-from pprint import pprint
+import re
+import argparse
+import pytest
+from StringIO import StringIO
-from llnl.util.filesystem import join_path, mkdirp
+from llnl.util.filesystem import *
from llnl.util.tty.colify import colify
-from llnl.util.lang import list_modules
import spack
-import spack.test
-description ="Run unit tests"
+description = "A thin wrapper around the pytest command."
+
def setup_parser(subparser):
subparser.add_argument(
- 'names', nargs='*', help="Names of tests to run.")
- subparser.add_argument(
- '-l', '--list', action='store_true', dest='list', help="Show available tests")
- subparser.add_argument(
- '--createXmlOutput', action='store_true', dest='createXmlOutput',
- help="Create JUnit XML from test results")
- subparser.add_argument(
- '--xmlOutputDir', dest='xmlOutputDir',
- help="Nose creates XML files in this directory")
+ '-H', '--pytest-help', action='store_true', default=False,
+ help="print full pytest help message, showing advanced options.")
+
+ list_group = subparser.add_mutually_exclusive_group()
+ list_group.add_argument(
+ '-l', '--list', action='store_true', default=False,
+ help="list basic test names.")
+ list_group.add_argument(
+ '-L', '--long-list', action='store_true', default=False,
+ help="list the entire hierarchy of tests.")
subparser.add_argument(
- '-v', '--verbose', action='store_true', dest='verbose',
- help="verbose output")
+ 'tests', nargs=argparse.REMAINDER,
+ help="list of tests to run (will be passed to pytest -k).")
-def test(parser, args):
- if args.list:
- print "Available tests:"
- colify(spack.test.list_tests(), indent=2)
+def do_list(args, unknown_args):
+    """Print a more readable list of tests than pytest's default output."""
+ # Run test collection and get the tree out.
+ old_output = sys.stdout
+ try:
+ sys.stdout = output = StringIO()
+ pytest.main(['--collect-only'])
+ finally:
+ sys.stdout = old_output
+
+ # put the output in a more readable tree format.
+ lines = output.getvalue().split('\n')
+ output_lines = []
+ for line in lines:
+ match = re.match(r"(\s*)<([^ ]*) '([^']*)'", line)
+ if not match:
+ continue
+ indent, nodetype, name = match.groups()
- else:
- if not args.createXmlOutput:
- outputDir = None
+ # only print top-level for short list
+ if args.list:
+ if not indent:
+ output_lines.append(
+ os.path.basename(name).replace('.py', ''))
else:
- if not args.xmlOutputDir:
- outputDir = join_path(os.getcwd(), "test-output")
- else:
- outputDir = os.path.abspath(args.xmlOutputDir)
-
- if not os.path.exists(outputDir):
- mkdirp(outputDir)
- spack.test.run(args.names, outputDir, args.verbose)
+ print indent + name
+
+ if args.list:
+ colify(output_lines)
+
+
+def test(parser, args, unknown_args):
+ if args.pytest_help:
+ # make the pytest.main help output more accurate
+ sys.argv[0] = 'spack test'
+ pytest.main(['-h'])
+ return
+
+    # pytest.ini lives in the root of the spack repository.
+ with working_dir(spack.prefix):
+ # --list and --long-list print the test output better.
+ if args.list or args.long_list:
+ do_list(args, unknown_args)
+ return
+
+ # Allow keyword search without -k if no options are specified
+ if (args.tests and not unknown_args and
+ not any(arg.startswith('-') for arg in args.tests)):
+ return pytest.main(['-k'] + args.tests)
+
+ # Just run the pytest command
+ return pytest.main(unknown_args + args.tests)
diff --git a/lib/spack/spack/cmd/uninstall.py b/lib/spack/spack/cmd/uninstall.py
index 3bffc2633b..0fc22ce538 100644
--- a/lib/spack/spack/cmd/uninstall.py
+++ b/lib/spack/spack/cmd/uninstall.py
@@ -29,8 +29,8 @@ import argparse
import llnl.util.tty as tty
import spack
import spack.cmd
+import spack.store
import spack.repository
-from spack.cmd.find import display_specs
description = "Remove an installed package"
@@ -39,65 +39,69 @@ error_message = """You can either:
b) use spack uninstall -a to uninstall ALL matching specs.
"""
-
-def ask_for_confirmation(message):
- while True:
- tty.msg(message + '[y/n]')
- choice = raw_input().lower()
- if choice == 'y':
- break
- elif choice == 'n':
- raise SystemExit('Operation aborted')
- tty.warn('Please reply either "y" or "n"')
+# Arguments for display_specs when we find ambiguity
+display_args = {
+ 'long': True,
+ 'show_flags': True,
+ 'variants': True
+}
def setup_parser(subparser):
subparser.add_argument(
'-f', '--force', action='store_true', dest='force',
help="Remove regardless of whether other packages depend on this one.")
+
subparser.add_argument(
'-a', '--all', action='store_true', dest='all',
- help="USE CAREFULLY. Remove ALL installed packages that match each " +
- "supplied spec. i.e., if you say uninstall libelf, ALL versions of " +
- "libelf are uninstalled. This is both useful and dangerous, like rm -r.")
+ help="USE CAREFULLY. Remove ALL installed packages that match each "
+ "supplied spec. i.e., if you say uninstall `libelf`,"
+ " ALL versions of `libelf` are uninstalled. If no spec is "
+ "supplied all installed software will be uninstalled. This "
+ "is both useful and dangerous, like rm -r.")
+
subparser.add_argument(
'-d', '--dependents', action='store_true', dest='dependents',
- help='Also uninstall any packages that depend on the ones given via command line.'
- )
+ help='Also uninstall any packages that depend on the ones given '
+ 'via command line.')
+
subparser.add_argument(
'-y', '--yes-to-all', action='store_true', dest='yes_to_all',
- help='Assume "yes" is the answer to every confirmation asked to the user.'
+ help='Assume "yes" is the answer to every confirmation requested')
- )
- subparser.add_argument('packages', nargs=argparse.REMAINDER, help="specs of packages to uninstall")
+ subparser.add_argument(
+ 'packages',
+ nargs=argparse.REMAINDER,
+ help="specs of packages to uninstall")
def concretize_specs(specs, allow_multiple_matches=False, force=False):
- """
- Returns a list of specs matching the non necessarily concretized specs given from cli
+    """Returns a list of specs matching the not necessarily
+       concretized specs given on the command line
Args:
specs: list of specs to be matched against installed packages
- allow_multiple_matches : boolean (if True multiple matches for each item in specs are admitted)
+ allow_multiple_matches : if True multiple matches are admitted
Return:
list of specs
"""
- specs_from_cli = [] # List of specs that match expressions given via command line
+ # List of specs that match expressions given via command line
+ specs_from_cli = []
has_errors = False
for spec in specs:
- matching = spack.installed_db.query(spec)
+ matching = spack.store.db.query(spec)
# For each spec provided, make sure it refers to only one package.
# Fail and ask user to be unambiguous if it doesn't
if not allow_multiple_matches and len(matching) > 1:
tty.error("%s matches multiple packages:" % spec)
print()
- display_specs(matching, long=True)
+ spack.cmd.display_specs(matching, **display_args)
print()
has_errors = True
# No installed package matches the query
- if len(matching) == 0 and not force:
+ if len(matching) == 0 and spec is not any:
tty.error("%s does not match any installed packages." % spec)
has_errors = True
@@ -109,8 +113,8 @@ def concretize_specs(specs, allow_multiple_matches=False, force=False):
def installed_dependents(specs):
- """
- Returns a dictionary that maps a spec with a list of its installed dependents
+    """Returns a dictionary that maps a spec to a list of its
+ installed dependents
Args:
specs: list of specs to be checked for dependents
@@ -120,7 +124,8 @@ def installed_dependents(specs):
"""
dependents = {}
for item in specs:
- lst = [x for x in item.package.installed_dependents if x not in specs]
+ lst = [x for x in spack.store.db.installed_dependents(item)
+ if x not in specs]
if lst:
lst = list(set(lst))
dependents[item] = lst
@@ -140,7 +145,7 @@ def do_uninstall(specs, force):
try:
# should work if package is known to spack
packages.append(item.package)
- except spack.repository.UnknownPackageError as e:
+ except spack.repository.UnknownPackageError:
# The package.py file has gone away -- but still
# want to uninstall.
spack.Package(item).do_uninstall(force=True)
@@ -148,47 +153,63 @@ def do_uninstall(specs, force):
# Sort packages to be uninstalled by the number of installed dependents
# This ensures we do things in the right order
def num_installed_deps(pkg):
- return len(pkg.installed_dependents)
+ return len(spack.store.db.installed_dependents(pkg.spec))
packages.sort(key=num_installed_deps)
for item in packages:
item.do_uninstall(force=force)
-def uninstall(parser, args):
- if not args.packages:
- tty.die("uninstall requires at least one package argument.")
-
- with spack.installed_db.write_transaction():
+def get_uninstall_list(args):
+ specs = [any]
+ if args.packages:
specs = spack.cmd.parse_specs(args.packages)
- # Gets the list of installed specs that match the ones give via cli
- uninstall_list = concretize_specs(specs, args.all, args.force) # takes care of '-a' is given in the cli
- dependent_list = installed_dependents(uninstall_list) # takes care of '-d'
-
- # Process dependent_list and update uninstall_list
- has_error = False
- if dependent_list and not args.dependents and not args.force:
- for spec, lst in dependent_list.items():
- tty.error("Will not uninstall %s" % spec.format("$_$@$%@$#", color=True))
- print('')
- print("The following packages depend on it:")
- display_specs(lst, long=True)
- print('')
- has_error = True
- elif args.dependents:
- for key, lst in dependent_list.items():
- uninstall_list.extend(lst)
- uninstall_list = list(set(uninstall_list))
-
- if has_error:
- tty.die('You can use spack uninstall --dependents to uninstall these dependencies as well')
-
- if not args.yes_to_all:
- tty.msg("The following packages will be uninstalled : ")
+
+ # Get the list of installed specs that match the ones given via the CLI.
+ # This takes care of the case where '-a' is given on the CLI.
+ uninstall_list = concretize_specs(specs, args.all, args.force)
+
+ # Takes care of '-d'
+ dependent_list = installed_dependents(uninstall_list)
+
+ # Process dependent_list and update uninstall_list
+ has_error = False
+ if dependent_list and not args.dependents and not args.force:
+ for spec, lst in dependent_list.items():
+ tty.error("Will not uninstall %s" %
+ spec.format("$_$@$%@$#", color=True))
print('')
- display_specs(uninstall_list, long=True)
+ print("The following packages depend on it:")
+ spack.cmd.display_specs(lst, **display_args)
print('')
- ask_for_confirmation('Do you want to proceed ? ')
+ has_error = True
+ elif args.dependents:
+ for key, lst in dependent_list.items():
+ uninstall_list.extend(lst)
+ uninstall_list = list(set(uninstall_list))
+ if has_error:
+ tty.die('You can use spack uninstall --dependents '
+ 'to uninstall these dependents as well')
+
+ return uninstall_list
+
+
+def uninstall(parser, args):
+ if not args.packages and not args.all:
+ tty.die("uninstall requires at least one package argument.")
+
+ uninstall_list = get_uninstall_list(args)
+
+ if not uninstall_list:
+ tty.msg("There are no package to uninstall.")
+ return
+
+ if not args.yes_to_all:
+ tty.msg("The following packages will be uninstalled : ")
+ print('')
+ spack.cmd.display_specs(uninstall_list, **display_args)
+ print('')
+ spack.cmd.ask_for_confirmation('Do you want to proceed? ')
- # Uninstall everything on the list
- do_uninstall(uninstall_list, args.force)
+ # Uninstall everything on the list
+ do_uninstall(uninstall_list, args.force)
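
For reference, a minimal sketch (not part of the patch; the spec string and
flag values are hypothetical) of how the helpers above fit together when
`spack uninstall` runs without -a, -d, or -f:

    import spack.cmd

    # parse, concretize against the installed database, then check dependents
    specs = spack.cmd.parse_specs(['libelf@0.8.13'])
    matches = concretize_specs(specs, allow_multiple_matches=False, force=False)
    dependents = installed_dependents(matches)
    if not dependents:
        # nothing else depends on the matches, so removal is safe
        do_uninstall(matches, force=False)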
diff --git a/lib/spack/spack/cmd/unload.py b/lib/spack/spack/cmd/unload.py
index 7bd15750ed..b52bedb7b4 100644
--- a/lib/spack/spack/cmd/unload.py
+++ b/lib/spack/spack/cmd/unload.py
@@ -25,13 +25,15 @@
import argparse
import spack.modules
-description ="Remove package from environment using module."
+description = "Remove package from environment using module."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to unload with modules.')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to unload with modules.')
def unload(parser, args):
diff --git a/lib/spack/spack/cmd/unuse.py b/lib/spack/spack/cmd/unuse.py
index 789a690e9c..6403cf6162 100644
--- a/lib/spack/spack/cmd/unuse.py
+++ b/lib/spack/spack/cmd/unuse.py
@@ -25,13 +25,15 @@
import argparse
import spack.modules
-description ="Remove package from environment using dotkit."
+description = "Remove package from environment using dotkit."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to unuse with dotkit.')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to unuse with dotkit.')
def unuse(parser, args):
diff --git a/lib/spack/spack/cmd/url-parse.py b/lib/spack/spack/cmd/url_parse.py
index ce12a17d13..2af9671459 100644
--- a/lib/spack/spack/cmd/url-parse.py
+++ b/lib/spack/spack/cmd/url_parse.py
@@ -22,28 +22,28 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-
import llnl.util.tty as tty
import spack
import spack.url
from spack.util.web import find_versions_of_archive
-description = "Show parsing of a URL, optionally spider web for other versions."
+description = "Show parsing of a URL, optionally spider web for versions."
+
def setup_parser(subparser):
subparser.add_argument('url', help="url of a package archive")
subparser.add_argument(
- '-s', '--spider', action='store_true', help="Spider the source page for versions.")
+ '-s', '--spider', action='store_true',
+ help="Spider the source page for versions.")
def print_name_and_version(url):
name, ns, nl, ntup, ver, vs, vl, vtup = spack.url.substitution_offsets(url)
- underlines = [" "] * max(ns+nl, vs+vl)
- for i in range(ns, ns+nl):
+ underlines = [" "] * max(ns + nl, vs + vl)
+ for i in range(ns, ns + nl):
underlines[i] = '-'
- for i in range(vs, vs+vl):
+ for i in range(vs, vs + vl):
underlines[i] = '~'
print " %s" % url
@@ -53,15 +53,19 @@ def print_name_and_version(url):
def url_parse(parser, args):
url = args.url
- ver, vs, vl = spack.url.parse_version_offset(url)
- name, ns, nl = spack.url.parse_name_offset(url, ver)
+ ver, vs, vl = spack.url.parse_version_offset(url, debug=True)
+ name, ns, nl = spack.url.parse_name_offset(url, ver, debug=True)
+ print
- tty.msg("Parsing URL:")
+ tty.msg("Detected:")
try:
print_name_and_version(url)
except spack.url.UrlParseError as e:
tty.error(str(e))
+ print ' name: %s' % name
+ print ' version: %s' % ver
+
print
tty.msg("Substituting version 9.9.9b:")
newurl = spack.url.substitute_version(url, '9.9.9b')
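
As an aside (illustrative only; the URL is hypothetical), the substitution
step above simply swaps the detected version string inside the URL, roughly:

    # spack.url.substitute_version('http://example.com/libelf-0.8.13.tar.gz',
    #                              '9.9.9b')
    # -> 'http://example.com/libelf-9.9.9b.tar.gz'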
diff --git a/lib/spack/spack/cmd/urls.py b/lib/spack/spack/cmd/urls.py
index 2fe2019a22..f151581d7d 100644
--- a/lib/spack/spack/cmd/urls.py
+++ b/lib/spack/spack/cmd/urls.py
@@ -22,12 +22,12 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
import spack
import spack.url
description = "Inspect urls used by packages in spack."
+
def setup_parser(subparser):
subparser.add_argument(
'-c', '--color', action='store_true',
@@ -53,6 +53,7 @@ def urls(parser, args):
for url in sorted(urls):
if args.color or args.extrapolation:
- print spack.url.color_url(url, subs=args.extrapolation, errors=True)
+ print spack.url.color_url(
+ url, subs=args.extrapolation, errors=True)
else:
print url
diff --git a/lib/spack/spack/cmd/use.py b/lib/spack/spack/cmd/use.py
index bbb90fde1b..e3612ace48 100644
--- a/lib/spack/spack/cmd/use.py
+++ b/lib/spack/spack/cmd/use.py
@@ -25,13 +25,15 @@
import argparse
import spack.modules
-description ="Add package to environment using dotkit."
+description = "Add package to environment using dotkit."
+
def setup_parser(subparser):
"""Parser is only constructed so that this prints a nice help
message with -h. """
subparser.add_argument(
- 'spec', nargs=argparse.REMAINDER, help='Spec of package to use with dotkit.')
+ 'spec', nargs=argparse.REMAINDER,
+ help='Spec of package to use with dotkit.')
def use(parser, args):
diff --git a/lib/spack/spack/cmd/versions.py b/lib/spack/spack/cmd/versions.py
index ec3a4b2e34..1e95225ab8 100644
--- a/lib/spack/spack/cmd/versions.py
+++ b/lib/spack/spack/cmd/versions.py
@@ -22,15 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from llnl.util.tty.colify import colify
import llnl.util.tty as tty
import spack
-description ="List available versions of a package"
+description = "List available versions of a package"
+
def setup_parser(subparser):
- subparser.add_argument('package', metavar='PACKAGE', help='Package to list versions for')
+ subparser.add_argument('package', metavar='PACKAGE',
+ help='Package to list versions for')
def versions(parser, args):
diff --git a/lib/spack/spack/cmd/view.py b/lib/spack/spack/cmd/view.py
new file mode 100644
index 0000000000..869a58f15c
--- /dev/null
+++ b/lib/spack/spack/cmd/view.py
@@ -0,0 +1,303 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+'''Produce a "view" of a Spack DAG.
+
+A "view" is file hierarchy representing the union of a number of
+Spack-installed package file hierarchies. The union is formed from:
+
+- specs resolved from the package names given by the user (the seeds)
+
+- all dependencies of the seeds unless the user specifies `--no-dependencies`
+
+- less any specs with names matching the regular expressions given by
+ `--exclude`
+
+The `view` can be built and torn down via a number of methods (the "actions"):
+
+- symlink :: a file system view which is a directory hierarchy that is
+ the union of the hierarchies of the installed packages in the DAG
+ where installed files are referenced via symlinks.
+
+- hardlink :: like the symlink view but hardlinks are used.
+
+- statlink :: a view producing a status report of a symlink or
+ hardlink view.
+
+The file system view concept is inspired by Nix; implemented by
+brett.viren@gmail.com, ca. 2016.
+
+'''
+# Implementation notes:
+#
+# This is implemented as a visitor pattern on the set of package specs.
+#
+# The command line ACTION maps to a visitor_*() function which takes
+# the set of package specs and any args which may be specific to the
+# ACTION.
+#
+# To add a new view:
+# 1. add a new cmd line args sub parser ACTION
+# 2. add any action-specific options/arguments, most likely a list of specs.
+# 3. add a visitor_MYACTION() function
+# 4. add any visitor_MYALIAS assignments to match any command line aliases
+
+import os
+import re
+import spack
+import spack.cmd
+import llnl.util.tty as tty
+
+description = "Produce a single-rooted directory view of a spec."
+
+
+def setup_parser(sp):
+ setup_parser.parser = sp
+
+ sp.add_argument(
+ '-v', '--verbose', action='store_true', default=False,
+ help="Display verbose output.")
+ sp.add_argument(
+ '-e', '--exclude', action='append', default=[],
+ help="Exclude packages with names matching the given regex pattern.")
+ sp.add_argument(
+ '-d', '--dependencies', choices=['true', 'false', 'yes', 'no'],
+ default='true',
+ help="Follow dependencies.")
+
+ ssp = sp.add_subparsers(metavar='ACTION', dest='action')
+
+ specs_opts = dict(metavar='spec', nargs='+',
+ help="Seed specs of the packages to view.")
+
+ # The action parameterizes the command but in keeping with Spack
+ # patterns we make it a subcommand.
+ file_system_view_actions = [
+ ssp.add_parser(
+ 'symlink', aliases=['add', 'soft'],
+ help='Add package files to a filesystem view via symbolic links.'),
+ ssp.add_parser(
+ 'hardlink', aliases=['hard'],
+ help='Add package files to a filesystem view via hard links.'),
+ ssp.add_parser(
+ 'remove', aliases=['rm'],
+ help='Remove packages from a filesystem view.'),
+ ssp.add_parser(
+ 'statlink', aliases=['status', 'check'],
+ help='Check status of packages in a filesystem view.')
+ ]
+ # All these options and arguments are common to every action.
+ for act in file_system_view_actions:
+ act.add_argument('path', nargs=1,
+ help="Path to file system view directory.")
+ act.add_argument('specs', **specs_opts)
+
+ return
+
+
+def assuredir(path):
+ 'Assure path exists as a directory'
+ if not os.path.exists(path):
+ os.makedirs(path)
+
+
+def relative_to(prefix, path):
+ 'Return end of `path` relative to `prefix`'
+ assert 0 == path.find(prefix)
+ reldir = path[len(prefix):]
+ if reldir.startswith('/'):
+ reldir = reldir[1:]
+ return reldir
+
+
+def transform_path(spec, path, prefix=None):
+ 'Return the relative path in the view corresponding to `path` under spec.prefix'
+ if os.path.isabs(path):
+ path = relative_to(spec.prefix, path)
+ subdirs = path.split(os.path.sep)
+ if subdirs[0] == '.spack':
+ lst = ['.spack', spec.name] + subdirs[1:]
+ path = os.path.join(*lst)
+ if prefix:
+ path = os.path.join(prefix, path)
+ return path
+
+
+def purge_empty_directories(path):
+ '''Ascend from the leaves reachable from `path`
+ and remove empty directories.'''
+ for dirpath, subdirs, files in os.walk(path, topdown=False):
+ for sd in subdirs:
+ sdp = os.path.join(dirpath, sd)
+ try:
+ os.rmdir(sdp)
+ except OSError:
+ pass
+
+
+def filter_exclude(specs, exclude):
+ 'Filter out specs whose names match any of the exclude regexes'
+ to_exclude = [re.compile(e) for e in exclude]
+
+ def exclude(spec):
+ for e in to_exclude:
+ if e.match(spec.name):
+ return True
+ return False
+ return [s for s in specs if not exclude(s)]
+
+
+def flatten(seeds, descend=True):
+ 'Normalize and flatten seed specs, optionally descending the dependency hierarchy'
+ flat = set()
+ for spec in seeds:
+ if not descend:
+ flat.add(spec)
+ continue
+ flat.update(spec.normalized().traverse())
+ return flat
+
+
+def check_one(spec, path, verbose=False):
+ 'Check status of view in path against spec'
+ dotspack = os.path.join(path, '.spack', spec.name)
+ if os.path.exists(os.path.join(dotspack)):
+ tty.info('Package in view: "%s"' % spec.name)
+ return
+ tty.info('Package not in view: "%s"' % spec.name)
+ return
+
+
+def remove_one(spec, path, verbose=False):
+ 'Remove any files found in `spec` from `path` and purge empty directories.'
+
+ if not os.path.exists(path):
+ return # done, short circuit
+
+ dotspack = transform_path(spec, '.spack', path)
+ if not os.path.exists(dotspack):
+ if verbose:
+ tty.info('Skipping nonexistent package: "%s"' % spec.name)
+ return
+
+ if verbose:
+ tty.info('Removing package: "%s"' % spec.name)
+ for dirpath, dirnames, filenames in os.walk(spec.prefix):
+ if not filenames:
+ continue
+ targdir = transform_path(spec, dirpath, path)
+ for fname in filenames:
+ dst = os.path.join(targdir, fname)
+ if not os.path.exists(dst):
+ continue
+ os.unlink(dst)
+
+
+def link_one(spec, path, link=os.symlink, verbose=False):
+ 'Link all files in `spec` into directory `path`.'
+
+ dotspack = transform_path(spec, '.spack', path)
+ if os.path.exists(dotspack):
+ tty.warn('Skipping existing package: "%s"' % spec.name)
+ return
+
+ if verbose:
+ tty.info('Linking package: "%s"' % spec.name)
+ for dirpath, dirnames, filenames in os.walk(spec.prefix):
+ if not filenames:
+ continue # avoid explicitly making empty dirs
+
+ targdir = transform_path(spec, dirpath, path)
+ assuredir(targdir)
+
+ for fname in filenames:
+ src = os.path.join(dirpath, fname)
+ dst = os.path.join(targdir, fname)
+ if os.path.exists(dst):
+ if '.spack' in dst.split(os.path.sep):
+ continue # silence these
+ tty.warn("Skipping existing file: %s" % dst)
+ continue
+ link(src, dst)
+
+
+def visitor_symlink(specs, args):
+ 'Symlink all files found in specs'
+ path = args.path[0]
+ assuredir(path)
+ for spec in specs:
+ link_one(spec, path, verbose=args.verbose)
+
+
+visitor_add = visitor_symlink
+visitor_soft = visitor_symlink
+
+
+def visitor_hardlink(specs, args):
+ 'Hardlink all files found in specs'
+ path = args.path[0]
+ assuredir(path)
+ for spec in specs:
+ link_one(spec, path, os.link, verbose=args.verbose)
+
+
+visitor_hard = visitor_hardlink
+
+
+def visitor_remove(specs, args):
+ 'Remove all files and directories found in specs from args.path'
+ path = args.path[0]
+ for spec in specs:
+ remove_one(spec, path, verbose=args.verbose)
+ purge_empty_directories(path)
+
+
+visitor_rm = visitor_remove
+
+
+def visitor_statlink(specs, args):
+ 'Give status of view in args.path relative to specs'
+ path = args.path[0]
+ for spec in specs:
+ check_one(spec, path, verbose=args.verbose)
+
+
+visitor_status = visitor_statlink
+visitor_check = visitor_statlink
+
+
+def view(parser, args):
+ 'Produce a view of a set of packages.'
+
+ # Process common args
+ seeds = [spack.cmd.disambiguate_spec(s) for s in args.specs]
+ specs = flatten(seeds, args.dependencies.lower() in ['yes', 'true'])
+ specs = filter_exclude(specs, args.exclude)
+
+ # Execute the visitation.
+ try:
+ visitor = globals()['visitor_' + args.action]
+ except KeyError:
+ tty.die('Unknown action: "%s"' % args.action)
+ visitor(specs, args)
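
To make the four implementation-note steps above concrete, here is a hedged
sketch (not part of the patch; the 'print' action and its parser wiring are
hypothetical) of what adding a new ACTION would look like:

    # 1./2. In setup_parser(), register the ACTION and its arguments:
    #     pp = ssp.add_parser('print', help='Print specs the view would contain.')
    #     pp.add_argument('specs', **specs_opts)
    # 3. Add the matching visitor; view() dispatches on 'visitor_' + args.action:
    def visitor_print(specs, args):
        'Print the names of all specs that would populate the view.'
        for name in sorted(s.name for s in specs):
            print name
    # 4. Add aliases, if any, e.g.: visitor_ls = visitor_print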
diff --git a/lib/spack/spack/compiler.py b/lib/spack/spack/compiler.py
index e2da272212..9e9c7cbcb4 100644
--- a/lib/spack/spack/compiler.py
+++ b/lib/spack/spack/compiler.py
@@ -25,21 +25,20 @@
import os
import re
import itertools
-from datetime import datetime
import llnl.util.tty as tty
-from llnl.util.lang import memoized
from llnl.util.filesystem import join_path
import spack.error
import spack.spec
+import spack.architecture
from spack.util.multiproc import parmap
from spack.util.executable import *
from spack.util.environment import get_path
-from spack.version import Version
__all__ = ['Compiler', 'get_compiler_version']
+
def _verify_executables(*paths):
for path in paths:
if not os.path.isfile(path) and os.access(path, os.X_OK):
@@ -48,8 +47,9 @@ def _verify_executables(*paths):
_version_cache = {}
+
def get_compiler_version(compiler_path, version_arg, regex='(.*)'):
- if not compiler_path in _version_cache:
+ if compiler_path not in _version_cache:
compiler = Executable(compiler_path)
output = compiler(version_arg, output=str, error=str)
@@ -107,22 +107,47 @@ class Compiler(object):
@property
def fc_rpath_arg(self):
return '-Wl,-rpath,'
+ # Cray PrgEnv name that can be used to load this compiler
+ PrgEnv = None
+ # Name of module used to switch versions of this compiler
+ PrgEnv_compiler = None
+
+ def __init__(self, cspec, operating_system, target,
+ paths, modules=[], alias=None, environment=None,
+ extra_rpaths=None, **kwargs):
+ self.spec = cspec
+ self.operating_system = str(operating_system)
+ self.target = target
+ self.modules = modules
+ self.alias = alias
-
- def __init__(self, cspec, cc, cxx, f77, fc):
def check(exe):
if exe is None:
return None
+ exe = self._find_full_path(exe)
_verify_executables(exe)
return exe
- self.cc = check(cc)
- self.cxx = check(cxx)
- self.f77 = check(f77)
- self.fc = check(fc)
-
- self.spec = cspec
-
+ self.cc = check(paths[0])
+ self.cxx = check(paths[1])
+ if len(paths) > 2:
+ self.f77 = check(paths[2])
+ if len(paths) == 3:
+ self.fc = self.f77
+ else:
+ self.fc = check(paths[3])
+
+ self.environment = environment
+ self.extra_rpaths = extra_rpaths or []
+
+ # Unfortunately, we have to make sure these params are accepted
+ # in the same order they are returned by sorted(flags)
+ # in compilers/__init__.py
+ self.flags = {}
+ for flag in spack.spec.FlagMap.valid_compiler_flags():
+ value = kwargs.get(flag, None)
+ if value is not None:
+ self.flags[flag] = value.split()
@property
def version(self):
@@ -133,31 +158,40 @@ class Compiler(object):
@property
def openmp_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- tty.die("The compiler you have chosen does not currently support OpenMP.",
- "If you think it should, please edit the compiler subclass and",
- "submit a pull request or issue.")
-
+ tty.die(
+ "The compiler you have chosen does not currently support OpenMP.",
+ "If you think it should, please edit the compiler subclass and",
+ "submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++11 is supported by that compiler
@property
def cxx11_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- tty.die("The compiler you have chosen does not currently support C++11.",
- "If you think it should, please edit the compiler subclass and",
- "submit a pull request or issue.")
-
+ tty.die(
+ "The compiler you have chosen does not currently support C++11.",
+ "If you think it should, please edit the compiler subclass and",
+ "submit a pull request or issue.")
# This property should be overridden in the compiler subclass if
# C++14 is supported by that compiler
@property
def cxx14_flag(self):
# If it is not overridden, assume it is not supported and warn the user
- tty.die("The compiler you have chosen does not currently support C++14.",
- "If you think it should, please edit the compiler subclass and",
- "submit a pull request or issue.")
-
+ tty.die(
+ "The compiler you have chosen does not currently support C++14.",
+ "If you think it should, please edit the compiler subclass and",
+ "submit a pull request or issue.")
+ # This property should be overridden in the compiler subclass if
+ # C++17 is supported by that compiler
+ @property
+ def cxx17_flag(self):
+ # If it is not overridden, assume it is not supported and warn the user
+ tty.die(
+ "The compiler you have chosen does not currently support C++17.",
+ "If you think it should, please edit the compiler subclass and",
+ "submit a pull request or issue.")
#
# Compiler classes have methods for querying the version of
@@ -166,7 +200,6 @@ class Compiler(object):
# Compiler *instances* are just data objects, and can only be
# constructed from an actual set of executables.
#
-
@classmethod
def default_version(cls, cc):
"""Override just this to override all compiler version functions."""
@@ -188,7 +221,6 @@ class Compiler(object):
def fc_version(cls, fc):
return cls.default_version(fc)
-
@classmethod
def _find_matches_in_path(cls, compiler_names, detect_version, *path):
"""Finds compilers in the paths supplied.
@@ -234,91 +266,61 @@ class Compiler(object):
version = detect_version(full_path)
return (version, prefix, suffix, full_path)
except ProcessError, e:
- tty.debug("Couldn't get version for compiler %s" % full_path, e)
+ tty.debug(
+ "Couldn't get version for compiler %s" % full_path, e)
return None
except Exception, e:
# Catching "Exception" here is fine because it just
# means something went wrong running a candidate executable.
- tty.debug("Error while executing candidate compiler %s" % full_path,
- "%s: %s" %(e.__class__.__name__, e))
+ tty.debug("Error while executing candidate compiler %s"
+ % full_path,
+ "%s: %s" % (e.__class__.__name__, e))
return None
- successful = [key for key in parmap(check, checks) if key is not None]
+ successful = [k for k in parmap(check, checks) if k is not None]
+
# The 'successful' list is ordered like the input paths.
# Reverse it here so that the dict creation (last insert wins)
# does not spoil the intended precedence.
successful.reverse()
return dict(((v, p, s), path) for v, p, s, path in successful)
- @classmethod
- def find(cls, *path):
- """Try to find this type of compiler in the user's
- environment. For each set of compilers found, this returns
- compiler objects with the cc, cxx, f77, fc paths and the
- version filled in.
-
- This will search for compilers with the names in cc_names,
- cxx_names, etc. and it will group them if they have common
- prefixes, suffixes, and versions. e.g., gcc-mp-4.7 would
- be grouped with g++-mp-4.7 and gfortran-mp-4.7.
- """
- dicts = parmap(
- lambda t: cls._find_matches_in_path(*t),
- [(cls.cc_names, cls.cc_version) + tuple(path),
- (cls.cxx_names, cls.cxx_version) + tuple(path),
- (cls.f77_names, cls.f77_version) + tuple(path),
- (cls.fc_names, cls.fc_version) + tuple(path)])
-
- all_keys = set()
- for d in dicts:
- all_keys.update(d)
-
- compilers = {}
- for k in all_keys:
- ver, pre, suf = k
-
- # Skip compilers with unknown version.
- if ver == 'unknown':
- continue
-
- paths = tuple(pn[k] if k in pn else None for pn in dicts)
- spec = spack.spec.CompilerSpec(cls.name, ver)
-
- if ver in compilers:
- prev = compilers[ver]
+ def _find_full_path(self, path):
+ """Return the actual path for a tool.
- # prefer the one with more compilers.
- prev_paths = [prev.cc, prev.cxx, prev.f77, prev.fc]
- newcount = len([p for p in paths if p is not None])
- prevcount = len([p for p in prev_paths if p is not None])
-
- # Don't add if it's not an improvement over prev compiler.
- if newcount <= prevcount:
- continue
-
- compilers[ver] = cls(spec, *paths)
-
- return list(compilers.values())
+ Some toolchains use forwarding executables (particularly Xcode-based
+ toolchains) which can be manipulated by external environment variables.
+ This method should be used to extract the actual path used for a tool
+ by resolving the final executable that the forwarding executables
+ end up running.
+ """
+ return path
+ def setup_custom_environment(self, pkg, env):
+ """Set any environment variables necessary to use the compiler."""
+ pass
def __repr__(self):
"""Return a string representation of the compiler toolchain."""
return self.__str__()
-
def __str__(self):
"""Return a string representation of the compiler toolchain."""
return "%s(%s)" % (
- self.name, '\n '.join((str(s) for s in (self.cc, self.cxx, self.f77, self.fc))))
+ self.name, '\n '.join((str(s) for s in (
+ self.cc, self.cxx, self.f77, self.fc, self.modules,
+ str(self.operating_system)))))
class CompilerAccessError(spack.error.SpackError):
+
def __init__(self, path):
super(CompilerAccessError, self).__init__(
"'%s' is not a valid compiler." % path)
class InvalidCompilerError(spack.error.SpackError):
+
def __init__(self):
super(InvalidCompilerError, self).__init__(
"Compiler has no executables.")
diff --git a/lib/spack/spack/compilers/__init__.py b/lib/spack/spack/compilers/__init__.py
index 692e5518aa..6e65f50269 100644
--- a/lib/spack/spack/compilers/__init__.py
+++ b/lib/spack/spack/compilers/__init__.py
@@ -26,10 +26,8 @@
system and configuring Spack to use multiple compilers.
"""
import imp
-import os
-import platform
-from llnl.util.lang import memoized, list_modules
+from llnl.util.lang import list_modules
from llnl.util.filesystem import join_path
import spack
@@ -38,20 +36,13 @@ import spack.spec
import spack.config
import spack.architecture
-from spack.util.multiproc import parmap
-from spack.compiler import Compiler
-from spack.util.executable import which
from spack.util.naming import mod_to_class
-from spack.util.environment import get_path
_imported_compilers_module = 'spack.compilers'
-_required_instance_vars = ['cc', 'cxx', 'f77', 'fc']
-
-# TODO: customize order in config file
-if platform.system() == 'Darwin':
- _default_order = ['clang', 'gcc', 'intel']
-else:
- _default_order = ['gcc', 'intel', 'pgi', 'clang', 'xlc', 'nag']
+_path_instance_vars = ['cc', 'cxx', 'f77', 'fc']
+_other_instance_vars = ['modules', 'operating_system', 'environment',
+ 'extra_rpaths']
+_cache_config_file = []
def _auto_compiler_spec(function):
@@ -64,151 +55,129 @@ def _auto_compiler_spec(function):
def _to_dict(compiler):
"""Return a dict version of compiler suitable to insert in YAML."""
- return {
- str(compiler.spec) : dict(
- (attr, getattr(compiler, attr, None))
- for attr in _required_instance_vars)
- }
+ d = {}
+ d['spec'] = str(compiler.spec)
+ d['paths'] = dict((attr, getattr(compiler, attr, None))
+ for attr in _path_instance_vars)
+ d['flags'] = dict((fname, fvals) for fname, fvals in compiler.flags.items())
+ d['operating_system'] = str(compiler.operating_system)
+ d['target'] = str(compiler.target)
+ d['modules'] = compiler.modules if compiler.modules else []
+ d['environment'] = compiler.environment if compiler.environment else {}
+ d['extra_rpaths'] = compiler.extra_rpaths if compiler.extra_rpaths else []
+
+ if compiler.alias:
+ d['alias'] = compiler.alias
+
+ return {'compiler': d}
-def get_compiler_config(arch=None, scope=None):
+def get_compiler_config(scope=None, init_config=True):
"""Return the compiler configuration for the specified architecture.
"""
- # Check whether we're on a front-end (native) architecture.
- my_arch = spack.architecture.sys_type()
- if arch is None:
- arch = my_arch
-
def init_compiler_config():
"""Compiler search used when Spack has no compilers."""
- config[arch] = {}
- compilers = find_compilers(*get_path('PATH'))
+ compilers = find_compilers()
+ compilers_dict = []
for compiler in compilers:
- config[arch].update(_to_dict(compiler))
- spack.config.update_config('compilers', config, scope=scope)
+ compilers_dict.append(_to_dict(compiler))
+ spack.config.update_config('compilers', compilers_dict, scope=scope)
config = spack.config.get_config('compilers', scope=scope)
-
# Update the configuration if there are currently no compilers
# configured. Avoid updating automatically if there ARE site
# compilers configured but no user ones.
- if arch == my_arch and arch not in config:
+ if not config and init_config:
if scope is None:
# We know no compilers were configured in any scope.
init_compiler_config()
+ config = spack.config.get_config('compilers', scope=scope)
elif scope == 'user':
# Check the site config and update the user config if
# nothing is configured at the site level.
site_config = spack.config.get_config('compilers', scope='site')
if not site_config:
init_compiler_config()
-
- return config[arch] if arch in config else {}
+ config = spack.config.get_config('compilers', scope=scope)
+ return config
+ elif config:
+ return config
+ else:
+ return [] # Return empty list which we will later append to.
-def add_compilers_to_config(compilers, arch=None, scope=None):
+def add_compilers_to_config(compilers, scope=None, init_config=True):
"""Add compilers to the config for the specified architecture.
Arguments:
- compilers: a list of Compiler objects.
- - arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
- if arch is None:
- arch = spack.architecture.sys_type()
-
- compiler_config = get_compiler_config(arch, scope)
+ compiler_config = get_compiler_config(scope, init_config)
for compiler in compilers:
- compiler_config[str(compiler.spec)] = dict(
- (c, getattr(compiler, c, "None"))
- for c in _required_instance_vars)
-
- update = { arch : compiler_config }
- spack.config.update_config('compilers', update, scope)
+ compiler_config.append(_to_dict(compiler))
+ global _cache_config_file
+ _cache_config_file = compiler_config
+ spack.config.update_config('compilers', compiler_config, scope)
@_auto_compiler_spec
-def remove_compiler_from_config(compiler_spec, arch=None, scope=None):
+def remove_compiler_from_config(compiler_spec, scope=None):
"""Remove compilers from the config, by spec.
Arguments:
- compiler_specs: a list of CompilerSpec objects.
- - arch: arch to add compilers for.
- scope: configuration scope to modify.
"""
- if arch is None:
- arch = spack.architecture.sys_type()
+ # Need a better way for this
+ global _cache_config_file
- compiler_config = get_compiler_config(arch, scope)
- del compiler_config[str(compiler_spec)]
- update = { arch : compiler_config }
+ compiler_config = get_compiler_config(scope)
+ config_length = len(compiler_config)
- spack.config.update_config('compilers', update, scope)
+ filtered_compiler_config = [
+ comp for comp in compiler_config
+ if spack.spec.CompilerSpec(comp['compiler']['spec']) != compiler_spec]
+ # Update the cache for changes
+ _cache_config_file = filtered_compiler_config
+ if len(filtered_compiler_config) == config_length: # No items removed
+ raise CompilerSpecInsufficientlySpecificError(compiler_spec)
+ spack.config.update_config('compilers', filtered_compiler_config, scope)
-def all_compilers_config(arch=None, scope=None):
+
+def all_compilers_config(scope=None, init_config=True):
"""Return a set of specs for all the compiler versions currently
available to build with. These are instances of CompilerSpec.
"""
# Get compilers for this architecture.
- arch_config = get_compiler_config(arch, scope)
-
- # Merge 'all' compilers with arch-specific ones.
- # Arch-specific compilers have higher precedence.
- merged_config = get_compiler_config('all', scope=scope)
- merged_config = spack.config._merge_yaml(merged_config, arch_config)
-
- return merged_config
+ # Cache the config file contents so we don't reload them every time.
+ global _cache_config_file
+ if not _cache_config_file:
+ _cache_config_file = get_compiler_config(scope, init_config)
+ return _cache_config_file
-def all_compilers(arch=None, scope=None):
+def all_compilers(scope=None, init_config=True):
# Return compiler specs from the merged config.
- return [spack.spec.CompilerSpec(s)
- for s in all_compilers_config(arch, scope)]
+ return [spack.spec.CompilerSpec(s['compiler']['spec'])
+ for s in all_compilers_config(scope, init_config)]
-def default_compiler():
- versions = []
- for name in _default_order:
- versions = find(name)
- if versions:
- break
- else:
- raise NoCompilersError()
-
- return sorted(versions)[-1]
-
-
-def find_compilers(*path):
+def find_compilers(*paths):
"""Return a list of compilers found in the suppied paths.
- This invokes the find() method for each Compiler class,
- and appends the compilers detected to a list.
+ This invokes the find_compilers() method for each operating
+ system associated with the host platform, and appends
+ the compilers detected to a list.
"""
- # Make sure path elements exist, and include /bin directories
- # under prefixes.
- filtered_path = []
- for p in path:
- # Eliminate symlinks and just take the real directories.
- p = os.path.realpath(p)
- if not os.path.isdir(p):
- continue
- filtered_path.append(p)
-
- # Check for a bin directory, add it if it exists
- bin = join_path(p, 'bin')
- if os.path.isdir(bin):
- filtered_path.append(os.path.realpath(bin))
-
- # Once the paths are cleaned up, do a search for each type of
- # compiler. We can spawn a bunch of parallel searches to reduce
- # the overhead of spelunking all these directories.
- types = all_compiler_types()
- compiler_lists = parmap(lambda cls: cls.find(*filtered_path), types)
-
- # ensure all the version calls we made are cached in the parent
- # process, as well. This speeds up Spack a lot.
- clist = reduce(lambda x,y: x+y, compiler_lists)
- return clist
+ # Find compilers for each operating system class
+ oss = all_os_classes()
+ compiler_lists = []
+ for o in oss:
+ compiler_lists.extend(o.find_compilers(*paths))
+ return compiler_lists
def supported_compilers():
@@ -227,47 +196,90 @@ def supported(compiler_spec):
@_auto_compiler_spec
-def find(compiler_spec, arch=None, scope=None):
+def find(compiler_spec, scope=None):
"""Return specs of available compilers that match the supplied
- compiler spec. Return an list if nothing found."""
- return [c for c in all_compilers(arch, scope) if c.satisfies(compiler_spec)]
+ compiler spec. Return an empty list if nothing found."""
+ return [c for c in all_compilers(scope) if c.satisfies(compiler_spec)]
@_auto_compiler_spec
-def compilers_for_spec(compiler_spec, arch=None, scope=None):
+def compilers_for_spec(compiler_spec, arch_spec=None, scope=None):
"""This gets all compilers that satisfy the supplied CompilerSpec.
Returns an empty list if none are found.
"""
- config = all_compilers_config(arch, scope)
-
- def get_compiler(cspec):
- items = config[str(cspec)]
-
- if not all(n in items for n in _required_instance_vars):
- raise InvalidCompilerConfigurationError(cspec)
-
- cls = class_for_compiler_name(cspec.name)
- compiler_paths = []
- for c in _required_instance_vars:
- compiler_path = items[c]
- if compiler_path != "None":
- compiler_paths.append(compiler_path)
- else:
- compiler_paths.append(None)
-
- return cls(cspec, *compiler_paths)
-
- matches = find(compiler_spec, arch, scope)
- return [get_compiler(cspec) for cspec in matches]
+ config = all_compilers_config(scope)
+
+ def get_compilers(cspec):
+ compilers = []
+
+ for items in config:
+ items = items['compiler']
+ if items['spec'] != str(cspec):
+ continue
+
+ # If an arch spec is given, confirm that this compiler
+ # is for the given operating system
+ os = items.get('operating_system', None)
+ if arch_spec and os != arch_spec.platform_os:
+ continue
+
+ # If an arch spec is given, confirm that this compiler
+ # is for the given target. If the target is 'any', match
+ # any given arch spec. If the compiler has no assigned
+ # target, this is an old compiler config file; skip this logic.
+ target = items.get('target', None)
+ if arch_spec and target and (target != arch_spec.target and
+ target != 'any'):
+ continue
+
+ if not ('paths' in items and
+ all(n in items['paths'] for n in _path_instance_vars)):
+ raise InvalidCompilerConfigurationError(cspec)
+
+ cls = class_for_compiler_name(cspec.name)
+
+ compiler_paths = []
+ for c in _path_instance_vars:
+ compiler_path = items['paths'][c]
+ if compiler_path != 'None':
+ compiler_paths.append(compiler_path)
+ else:
+ compiler_paths.append(None)
+
+ mods = items.get('modules')
+ if mods == 'None':
+ mods = []
+
+ alias = items.get('alias', None)
+ compiler_flags = items.get('flags', {})
+ environment = items.get('environment', {})
+ extra_rpaths = items.get('extra_rpaths', [])
+
+ compilers.append(
+ cls(cspec, os, target, compiler_paths, mods, alias,
+ environment, extra_rpaths, **compiler_flags))
+
+ return compilers
+
+ matches = set(find(compiler_spec, scope))
+ compilers = []
+ for cspec in matches:
+ compilers.extend(get_compilers(cspec))
+ return compilers
@_auto_compiler_spec
-def compiler_for_spec(compiler_spec):
+def compiler_for_spec(compiler_spec, arch_spec):
"""Get the compiler that satisfies compiler_spec. compiler_spec must
be concrete."""
assert(compiler_spec.concrete)
- compilers = compilers_for_spec(compiler_spec)
- assert(len(compilers) == 1)
+ assert(arch_spec.concrete)
+
+ compilers = compilers_for_spec(compiler_spec, arch_spec=arch_spec)
+ if len(compilers) < 1:
+ raise NoCompilerForSpecError(compiler_spec, arch_spec.platform_os)
+ if len(compilers) > 1:
+ raise CompilerSpecInsufficientlySpecificError(compiler_spec)
return compilers[0]
@@ -285,18 +297,47 @@ def class_for_compiler_name(compiler_name):
return cls
+def all_os_classes():
+ """
+ Return the list of classes for all operating systems available on
+ this platform
+ """
+ classes = []
+
+ platform = spack.architecture.platform()
+ for os_class in platform.operating_sys.values():
+ classes.append(os_class)
+
+ return classes
+
+
def all_compiler_types():
return [class_for_compiler_name(c) for c in supported_compilers()]
class InvalidCompilerConfigurationError(spack.error.SpackError):
+
def __init__(self, compiler_spec):
super(InvalidCompilerConfigurationError, self).__init__(
"Invalid configuration for [compiler \"%s\"]: " % compiler_spec,
"Compiler configuration must contain entries for all compilers: %s"
- % _required_instance_vars)
+ % _path_instance_vars)
class NoCompilersError(spack.error.SpackError):
def __init__(self):
- super(NoCompilersError, self).__init__("Spack could not find any compilers!")
+ super(NoCompilersError, self).__init__(
+ "Spack could not find any compilers!")
+
+
+class NoCompilerForSpecError(spack.error.SpackError):
+ def __init__(self, compiler_spec, target):
+ super(NoCompilerForSpecError, self).__init__(
+ "No compilers for operating system %s satisfy spec %s"
+ % (target, compiler_spec))
+
+
+class CompilerSpecInsufficientlySpecificError(spack.error.SpackError):
+ def __init__(self, compiler_spec):
+ super(CompilerSpecInsufficientlySpecificError, self).__init__(
+ "Multiple compilers satisfy spec %s" % compiler_spec)
diff --git a/lib/spack/spack/test/tally_plugin.py b/lib/spack/spack/compilers/cce.py
index 96af1c9b21..43d000dd69 100644
--- a/lib/spack/spack/test/tally_plugin.py
+++ b/lib/spack/spack/compilers/cce.py
@@ -22,38 +22,34 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
+from spack.compiler import *
-from nose.plugins import Plugin
-class Tally(Plugin):
- name = 'tally'
+class Cce(Compiler):
+ """Cray compiler environment compiler."""
+ # Subclasses use possible names of C compiler
+ cc_names = ['cc']
- def __init__(self):
- super(Tally, self).__init__()
- self.successCount = 0
- self.failCount = 0
- self.errorCount = 0
+ # Subclasses use possible names of C++ compiler
+ cxx_names = ['CC']
- @property
- def numberOfTestsRun(self):
- """Excludes skipped tests"""
- return self.errorCount + self.failCount + self.successCount
+ # Subclasses use possible names of Fortran 77 compiler
+ f77_names = ['ftn']
- def options(self, parser, env=os.environ):
- super(Tally, self).options(parser, env=env)
+ # Subclasses use possible names of Fortran 90 compiler
+ fc_names = ['ftn']
- def configure(self, options, conf):
- super(Tally, self).configure(options, conf)
+ # MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
+ suffixes = [r'-mp-\d\.\d']
- def addSuccess(self, test):
- self.successCount += 1
+ PrgEnv = 'PrgEnv-cray'
+ PrgEnv_compiler = 'cce'
- def addError(self, test, err):
- self.errorCount += 1
+ link_paths = {'cc': 'cc',
+ 'cxx': 'c++',
+ 'f77': 'f77',
+ 'fc': 'fc'}
- def addFailure(self, test, err):
- self.failCount += 1
-
- def finalize(self, result):
- pass
+ @classmethod
+ def default_version(cls, comp):
+ return get_compiler_version(comp, '-V', r'[Vv]ersion.*(\d+(\.\d+)+)')
diff --git a/lib/spack/spack/compilers/clang.py b/lib/spack/spack/compilers/clang.py
index 072bcd065f..3d68a37c44 100644
--- a/lib/spack/spack/compilers/clang.py
+++ b/lib/spack/spack/compilers/clang.py
@@ -23,11 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
+import os
+import sys
+import spack
import spack.compiler as cpr
from spack.compiler import *
from spack.util.executable import *
import llnl.util.tty as tty
from spack.version import ver
+from shutil import copytree, ignore_patterns
+
class Clang(Compiler):
# Subclasses use possible names of C compiler
@@ -37,17 +42,18 @@ class Clang(Compiler):
cxx_names = ['clang++']
# Subclasses use possible names of Fortran 77 compiler
- f77_names = []
+ f77_names = ['gfortran']
# Subclasses use possible names of Fortran 90 compiler
- fc_names = []
+ fc_names = ['gfortran']
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'clang/clang',
- 'cxx' : 'clang/clang++',
- # Use default wrappers for fortran, in case provided in compilers.yaml
- 'f77' : 'f77',
- 'fc' : 'f90' }
+ link_paths = {'cc': 'clang/clang',
+ 'cxx': 'clang/clang++',
+ # Use default wrappers for fortran, in case provided in
+ # compilers.yaml
+ 'f77': 'clang/gfortran',
+ 'fc': 'clang/gfortran'}
@property
def is_apple(self):
@@ -64,29 +70,68 @@ class Clang(Compiler):
@property
def cxx11_flag(self):
if self.is_apple:
- # FIXME: figure out from which version Apple's clang supports c++11
- return "-std=c++11"
+ # Adapted from CMake's AppleClang-CXX rules
+ # Spack's AppleClang detection is only valid from Xcode >= 4.6
+ if self.version < ver('4.0.0'):
+ tty.die("Only Apple LLVM 4.0 and above support c++11")
+ else:
+ return "-std=c++11"
else:
if self.version < ver('3.3'):
tty.die("Only Clang 3.3 and above support c++11.")
else:
return "-std=c++11"
+ @property
+ def cxx14_flag(self):
+ if self.is_apple:
+ # Adapted from CMake's rules for AppleClang
+ if self.version < ver('5.1.0'):
+ tty.die("Only Apple LLVM 5.1 and above support c++14.")
+ elif self.version < ver('6.1.0'):
+ return "-std=c++1y"
+ else:
+ return "-std=c++14"
+ else:
+ if self.version < ver('3.4'):
+ tty.die("Only Clang 3.4 and above support c++14.")
+ elif self.version < ver('3.5'):
+ return "-std=c++1y"
+ else:
+ return "-std=c++14"
+
+ @property
+ def cxx17_flag(self):
+ if self.is_apple:
+ # Adapted from CMake's rules for AppleClang
+ if self.version < ver('6.1.0'):
+ tty.die("Only Apple LLVM 6.1 and above support c++17.")
+ else:
+ return "-std=c++1z"
+ else:
+ if self.version < ver('3.5'):
+ tty.die("Only Clang 3.5 and above support c++17.")
+ else:
+ return "-std=c++1z"
+
+ @property
+ def pic_flag(self):
+ return "-fPIC"
+
@classmethod
- def default_version(self, comp):
+ def default_version(cls, comp):
"""The '--version' option works for clang compilers.
- On most platforms, output looks like this::
-
- clang version 3.1 (trunk 149096)
- Target: x86_64-unknown-linux-gnu
- Thread model: posix
+ On most platforms, output looks like this::
- On Mac OS X, it looks like this:
+ clang version 3.1 (trunk 149096)
+ Target: x86_64-unknown-linux-gnu
+ Thread model: posix
- Apple LLVM version 7.0.2 (clang-700.1.81)
- Target: x86_64-apple-darwin15.2.0
- Thread model: posix
+ On Mac OS X, it looks like this::
+ Apple LLVM version 7.0.2 (clang-700.1.81)
+ Target: x86_64-apple-darwin15.2.0
+ Thread model: posix
"""
if comp not in cpr._version_cache:
compiler = Executable(comp)
@@ -99,10 +144,112 @@ class Clang(Compiler):
ver = match.group(1) + '-apple'
else:
# Normal clang compiler versions are left as-is
- match = re.search(r'^clang version ([^ )]+)', output)
+ match = re.search(r'clang version ([^ )]+)', output)
if match:
ver = match.group(1)
cpr._version_cache[comp] = ver
return cpr._version_cache[comp]
+
+ def _find_full_path(self, path):
+ basename = os.path.basename(path)
+
+ if not self.is_apple or basename not in ('clang', 'clang++'):
+ return super(Clang, self)._find_full_path(path)
+
+ xcrun = Executable('xcrun')
+ full_path = xcrun('-f', basename, output=str)
+ return full_path.strip()
+
+ @classmethod
+ def fc_version(cls, fc):
+ version = get_compiler_version(
+ fc, '-dumpversion',
+ # older gfortran versions don't have simple dumpversion output.
+ r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
+ # This is horribly ad hoc: we need to map from gcc/gfortran version
+ # to clang version, but there could be multiple clang
+ # versions that work for a single gcc/gfortran version
+ if sys.platform == 'darwin':
+ clangversionfromgcc = {'6.2.0': '8.0.0-apple'}
+ else:
+ clangversionfromgcc = {}
+ if version in clangversionfromgcc:
+ return clangversionfromgcc[version]
+ else:
+ return 'unknown'
+
+ @classmethod
+ def f77_version(cls, f77):
+ return cls.fc_version(f77)
+
+ def setup_custom_environment(self, pkg, env):
+ """Set the DEVELOPER_DIR environment for the Xcode toolchain.
+
+ On macOS, not all buildsystems support querying CC and CXX for the
+ compilers to use and instead query the Xcode toolchain for what
+ compiler to run. This side-steps the spack wrappers. In order to inject
+ spack into this setup, we need to copy (a subset of) Xcode.app and
+ replace the compiler executables with symlinks to the spack wrapper.
+ Currently, the stage is used to store the Xcode.app copies. We then set
+ the 'DEVELOPER_DIR' environment variables to cause the xcrun and
+ related tools to use this Xcode.app.
+ """
+ super(Clang, self).setup_custom_environment(pkg, env)
+
+ if not self.is_apple or not pkg.use_xcode:
+ # if we do it for all packages, we get into big troubles with MPI:
+ # filter_compilers(self) will use mockup XCode compilers on macOS
+ # with Clang. Those point to Spack's compiler wrappers and
+ # consequently render MPI non-functional outside of Spack.
+ return
+
+ xcode_select = Executable('xcode-select')
+ real_root = xcode_select('--print-path', output=str).strip()
+ real_root = os.path.dirname(os.path.dirname(real_root))
+ developer_root = os.path.join(spack.stage_path,
+ 'xcode-select',
+ self.name,
+ str(self.version))
+ xcode_link = os.path.join(developer_root, 'Xcode.app')
+
+ if not os.path.exists(developer_root):
+ tty.warn('Copying Xcode from %s to %s in order to add spack '
+ 'wrappers to it. Please do not interrupt.'
+ % (real_root, developer_root))
+
+ # We need to make a new Xcode.app instance, but with symlinks to
+ # the spack wrappers for the compilers it ships. This is necessary
+ # because some projects insist on just asking xcrun and related
+ # tools where the compiler runs. These tools are very hard to trick
+ # as they do realpath and end up ignoring the symlinks in a
+ # "softer" tree of nothing but symlinks in the right places.
+ copytree(real_root, developer_root, symlinks=True,
+ ignore=ignore_patterns('AppleTV*.platform',
+ 'Watch*.platform',
+ 'iPhone*.platform',
+ 'Documentation',
+ 'swift*'))
+
+ real_dirs = [
+ 'Toolchains/XcodeDefault.xctoolchain/usr/bin',
+ 'usr/bin',
+ ]
+
+ bins = ['c++', 'c89', 'c99', 'cc', 'clang', 'clang++', 'cpp']
+
+ for real_dir in real_dirs:
+ dev_dir = os.path.join(developer_root,
+ 'Contents',
+ 'Developer',
+ real_dir)
+ for fname in os.listdir(dev_dir):
+ if fname in bins:
+ os.unlink(os.path.join(dev_dir, fname))
+ os.symlink(os.path.join(spack.build_env_path, 'cc'),
+ os.path.join(dev_dir, fname))
+
+ os.symlink(developer_root, xcode_link)
+
+ env.set('DEVELOPER_DIR', xcode_link)
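
A hedged illustration (not part of the patch; the spec, operating system,
target, and paths are assumptions) of how the version-gated C++ flag
properties above behave for a non-Apple clang:

    import spack.spec
    from spack.compilers.clang import Clang

    c = Clang(spack.spec.CompilerSpec('clang@3.4'), 'ubuntu14.04', 'x86_64',
              ['/usr/bin/clang', '/usr/bin/clang++', None, None])
    print c.cxx11_flag   # '-std=c++11'  (supported from clang 3.3)
    print c.cxx14_flag   # '-std=c++1y'  (3.4 only knows the draft spelling)
    # c.cxx17_flag would call tty.die(), since 3.5 is the minimum for c++1z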
diff --git a/lib/spack/spack/compilers/gcc.py b/lib/spack/spack/compilers/gcc.py
index 164bddeb3f..304f82a492 100644
--- a/lib/spack/spack/compilers/gcc.py
+++ b/lib/spack/spack/compilers/gcc.py
@@ -26,6 +26,7 @@ import llnl.util.tty as tty
from spack.compiler import *
from spack.version import ver
+
class Gcc(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['gcc']
@@ -40,14 +41,18 @@ class Gcc(Compiler):
fc_names = ['gfortran']
# MacPorts builds gcc versions with prefixes and -mp-X.Y suffixes.
- # Homebrew and Linuxes may build gcc with -X, -X.Y suffixes
- suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d']
+ # Homebrew and Linuxbrew may build gcc with -X, -X.Y suffixes.
+ # Old compatibility versions may contain XY suffixes.
+ suffixes = [r'-mp-\d\.\d', r'-\d\.\d', r'-\d', r'\d\d']
# Named wrapper links within spack.build_env_path
- link_paths = {'cc' : 'gcc/gcc',
- 'cxx' : 'gcc/g++',
- 'f77' : 'gcc/gfortran',
- 'fc' : 'gcc/gfortran' }
+ link_paths = {'cc': 'gcc/gcc',
+ 'cxx': 'gcc/g++',
+ 'f77': 'gcc/gfortran',
+ 'fc': 'gcc/gfortran'}
+
+ PrgEnv = 'PrgEnv-gnu'
+ PrgEnv_compiler = 'gcc'
@property
def openmp_flag(self):
@@ -66,9 +71,22 @@ class Gcc(Compiler):
def cxx14_flag(self):
if self.version < ver('4.8'):
tty.die("Only gcc 4.8 and above support c++14.")
+ elif self.version < ver('4.9'):
+ return "-std=c++1y"
else:
return "-std=c++14"
+ @property
+ def cxx17_flag(self):
+ if self.version < ver('5.0'):
+ tty.die("Only gcc 5.0 and above support c++17.")
+ else:
+ return "-std=c++1z"
+
+ @property
+ def pic_flag(self):
+ return "-fPIC"
+
@classmethod
def fc_version(cls, fc):
return get_compiler_version(
@@ -76,7 +94,10 @@ class Gcc(Compiler):
# older gfortran versions don't have simple dumpversion output.
r'(?:GNU Fortran \(GCC\))?(\d+\.\d+(?:\.\d+)?)')
-
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
+
+ @property
+ def stdcxx_libs(self):
+ return ('-lstdc++', )
diff --git a/lib/spack/spack/compilers/intel.py b/lib/spack/spack/compilers/intel.py
index 5007ece645..8461753962 100644
--- a/lib/spack/spack/compilers/intel.py
+++ b/lib/spack/spack/compilers/intel.py
@@ -26,6 +26,7 @@ from spack.compiler import *
import llnl.util.tty as tty
from spack.version import ver
+
class Intel(Compiler):
# Subclasses use possible names of C compiler
cc_names = ['icc']
@@ -40,10 +41,13 @@ class Intel(Compiler):
fc_names = ['ifort']
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'intel/icc',
- 'cxx' : 'intel/icpc',
- 'f77' : 'intel/ifort',
- 'fc' : 'intel/ifort' }
+ link_paths = {'cc': 'intel/icc',
+ 'cxx': 'intel/icpc',
+ 'f77': 'intel/ifort',
+ 'fc': 'intel/ifort'}
+
+ PrgEnv = 'PrgEnv-intel'
+ PrgEnv_compiler = 'intel'
@property
def openmp_flag(self):
@@ -61,19 +65,36 @@ class Intel(Compiler):
else:
return "-std=c++11"
+ @property
+ def cxx14_flag(self):
+ # Adapted from CMake's Intel-CXX rules.
+ if self.version < ver('15'):
+ tty.die("Only intel 15.0 and above support c++14.")
+ elif self.version < ver('15.0.2'):
+ return "-std=c++1y"
+ else:
+ return "-std=c++14"
+
+ @property
+ def pic_flag(self):
+ return "-fPIC"
@classmethod
def default_version(cls, comp):
"""The '--version' option seems to be the most consistent one
- for intel compilers. Output looks like this::
+ for intel compilers. Output looks like this::
- icpc (ICC) 12.1.5 20120612
- Copyright (C) 1985-2012 Intel Corporation. All rights reserved.
+ icpc (ICC) 12.1.5 20120612
+ Copyright (C) 1985-2012 Intel Corporation. All rights reserved.
- or::
+ or::
- ifort (IFORT) 12.1.5 20120612
- Copyright (C) 1985-2012 Intel Corporation. All rights reserved.
+ ifort (IFORT) 12.1.5 20120612
+ Copyright (C) 1985-2012 Intel Corporation. All rights reserved.
"""
return get_compiler_version(
comp, '--version', r'\((?:IFORT|ICC)\) ([^ ]+)')
+
+ @property
+ def stdcxx_libs(self):
+ return ('-cxxlib', )
diff --git a/lib/spack/spack/compilers/nag.py b/lib/spack/spack/compilers/nag.py
index cee11bc97a..c1da95a6c3 100644
--- a/lib/spack/spack/compilers/nag.py
+++ b/lib/spack/spack/compilers/nag.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
-import llnl.util.tty as tty
+
class Nag(Compiler):
# Subclasses use possible names of C compiler
@@ -39,11 +39,12 @@ class Nag(Compiler):
fc_names = ['nagfor']
# Named wrapper links within spack.build_env_path
- link_paths = { # Use default wrappers for C and C++, in case provided in compilers.yaml
- 'cc' : 'cc',
- 'cxx' : 'c++',
- 'f77' : 'nag/nagfor',
- 'fc' : 'nag/nagfor' }
+ # Use default wrappers for C and C++, in case provided in compilers.yaml
+ link_paths = {
+ 'cc': 'cc',
+ 'cxx': 'c++',
+ 'f77': 'nag/nagfor',
+ 'fc': 'nag/nagfor'}
@property
def openmp_flag(self):
@@ -55,25 +56,28 @@ class Nag(Compiler):
# However, it can be mixed with a compiler that does support it
return "-std=c++11"
+ @property
+ def pic_flag(self):
+ return "-PIC"
+
# Unlike other compilers, the NAG compiler passes options to GCC, which
# then passes them to the linker. Therefore, we need to doubly wrap the
# options with '-Wl,-Wl,,'
@property
def f77_rpath_arg(self):
- return '-Wl,-Wl,,-rpath,'
+ return '-Wl,-Wl,,-rpath,,'
@property
def fc_rpath_arg(self):
- return '-Wl,-Wl,,-rpath,'
+ return '-Wl,-Wl,,-rpath,,'
@classmethod
def default_version(self, comp):
"""The '-V' option works for nag compilers.
- Output looks like this::
+ Output looks like this::
- NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
- Product NPL6A60NA for x86-64 Linux
- Copyright 1990-2015 The Numerical Algorithms Group Ltd., Oxford, U.K.
+ NAG Fortran Compiler Release 6.0(Hibiya) Build 1037
+ Product NPL6A60NA for x86-64 Linux
"""
return get_compiler_version(
comp, '-V', r'NAG Fortran Compiler Release ([0-9.]+)')
diff --git a/lib/spack/spack/compilers/pgi.py b/lib/spack/spack/compilers/pgi.py
index d42148dc49..146c153041 100644
--- a/lib/spack/spack/compilers/pgi.py
+++ b/lib/spack/spack/compilers/pgi.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack.compiler import *
-import llnl.util.tty as tty
+
class Pgi(Compiler):
# Subclasses use possible names of C compiler
@@ -39,10 +39,13 @@ class Pgi(Compiler):
fc_names = ['pgfortran', 'pgf95', 'pgf90']
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'pgi/pgcc',
- 'cxx' : 'pgi/pgc++',
- 'f77' : 'pgi/pgfortran',
- 'fc' : 'pgi/pgfortran' }
+ link_paths = {'cc': 'pgi/pgcc',
+ 'cxx': 'pgi/pgc++',
+ 'f77': 'pgi/pgfortran',
+ 'fc': 'pgi/pgfortran'}
+
+ PrgEnv = 'PrgEnv-pgi'
+ PrgEnv_compiler = 'pgi'
@property
def openmp_flag(self):
@@ -52,15 +55,18 @@ class Pgi(Compiler):
def cxx11_flag(self):
return "-std=c++11"
+ @property
+ def pic_flag(self):
+ return "-fpic"
@classmethod
def default_version(cls, comp):
"""The '-V' option works for all the PGI compilers.
- Output looks like this::
+ Output looks like this::
- pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge
- The Portland Group - PGI Compilers and Tools
- Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.
+ pgcc 15.10-0 64-bit target on x86-64 Linux -tp sandybridge
+ The Portland Group - PGI Compilers and Tools
+ Copyright (c) 2015, NVIDIA CORPORATION. All rights reserved.
"""
return get_compiler_version(
comp, '-V', r'pg[^ ]* ([^ ]+) \d\d\d?-bit target')
diff --git a/lib/spack/spack/compilers/xl.py b/lib/spack/spack/compilers/xl.py
index bda2de4b87..f4b7c4237d 100644
--- a/lib/spack/spack/compilers/xl.py
+++ b/lib/spack/spack/compilers/xl.py
@@ -26,24 +26,26 @@ from spack.compiler import *
import llnl.util.tty as tty
from spack.version import ver
+
class Xl(Compiler):
# Subclasses use possible names of C compiler
- cc_names = ['xlc','xlc_r']
+ cc_names = ['xlc', 'xlc_r']
# Subclasses use possible names of C++ compiler
- cxx_names = ['xlC','xlC_r','xlc++','xlc++_r']
+ cxx_names = ['xlC', 'xlC_r', 'xlc++', 'xlc++_r']
# Subclasses use possible names of Fortran 77 compiler
- f77_names = ['xlf','xlf_r']
+ f77_names = ['xlf', 'xlf_r']
# Subclasses use possible names of Fortran 90 compiler
- fc_names = ['xlf90','xlf90_r','xlf95','xlf95_r','xlf2003','xlf2003_r','xlf2008','xlf2008_r']
+ fc_names = ['xlf90', 'xlf90_r', 'xlf95', 'xlf95_r',
+ 'xlf2003', 'xlf2003_r', 'xlf2008', 'xlf2008_r']
# Named wrapper links within spack.build_env_path
- link_paths = { 'cc' : 'xl/xlc',
- 'cxx' : 'xl/xlc++',
- 'f77' : 'xl/xlf',
- 'fc' : 'xl/xlf90' }
+ link_paths = {'cc': 'xl/xlc',
+ 'cxx': 'xl/xlc++',
+ 'f77': 'xl/xlf',
+ 'fc': 'xl/xlf90'}
@property
def openmp_flag(self):
@@ -56,8 +58,12 @@ class Xl(Compiler):
else:
return "-qlanglvl=extended0x"
+ @property
+ def pic_flag(self):
+ return "-qpic"
+
@classmethod
- def default_version(self, comp):
+ def default_version(cls, comp):
"""The '-qversion' is the standard option fo XL compilers.
Output looks like this::
@@ -81,28 +87,28 @@ class Xl(Compiler):
"""
return get_compiler_version(
- comp, '-qversion',r'([0-9]?[0-9]\.[0-9])')
+ comp, '-qversion', r'([0-9]?[0-9]\.[0-9])')
@classmethod
def fc_version(cls, fc):
- """The fortran and C/C++ versions of the XL compiler are always two units apart.
- By this we mean that the fortran release that goes with XL C/C++ 11.1 is 13.1.
- Having such a difference in version number is confusing spack quite a lot.
- Most notably if you keep the versions as is the default xl compiler will only
- have fortran and no C/C++.
- So we associate the Fortran compiler with the version associated to the C/C++
- compiler.
- One last stumble. Version numbers over 10 have at least a .1 those under 10
- a .0. There is no xlf 9.x or under currently available. BG/P and BG/L can
- such a compiler mix and possibly older version of AIX and linux on power.
+ """The fortran and C/C++ versions of the XL compiler are always
+ two units apart. By this we mean that the fortran release that
+ goes with XL C/C++ 11.1 is 13.1. Having such a difference in
+ version number is confusing spack quite a lot. Most notably
+ if you keep the versions as is the default xl compiler will
+ only have fortran and no C/C++. So we associate the Fortran
+ compiler with the version associated to the C/C++ compiler.
+ One last stumble. Version numbers over 10 have at least a .1
+ those under 10 a .0. There is no xlf 9.x or under currently
+ available. BG/P and BG/L can such a compiler mix and possibly
+ older version of AIX and linux on power.
"""
- fver = get_compiler_version(fc, '-qversion',r'([0-9]?[0-9]\.[0-9])')
+ fver = get_compiler_version(fc, '-qversion', r'([0-9]?[0-9]\.[0-9])')
cver = float(fver) - 2
- if cver < 10 :
- cver = cver - 0.1
+ if cver < 10:
+ cver = cver - 0.1
return str(cver)
-
@classmethod
def f77_version(cls, f77):
return cls.fc_version(f77)
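# A minimal standalone sketch, not part of the patch, of the version offset
# described in Xl.fc_version above; plain floats stand in for Spack's
# Version objects and get_compiler_version().
def xl_cxx_version_for_fortran(fver):
    cver = float(fver) - 2
    if cver < 10:
        cver = cver - 0.1  # releases below 10 end in .0 rather than .1
    return str(cver)

assert xl_cxx_version_for_fortran('13.1') == '11.1'  # xlf 13.1 pairs with xlC 11.1
assert xl_cxx_version_for_fortran('11.1') == '9.0'   # below 10: drop to .0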
diff --git a/lib/spack/spack/concretize.py b/lib/spack/spack/concretize.py
index f5e1c10b48..36e8b30196 100644
--- a/lib/spack/spack/concretize.py
+++ b/lib/spack/spack/concretize.py
@@ -33,6 +33,7 @@ or user preferences.
TODO: make this customizable and allow users to configure
concretization policies.
"""
+from __future__ import print_function
import spack
import spack.spec
import spack.compilers
@@ -40,11 +41,12 @@ import spack.architecture
import spack.error
from spack.version import *
from functools import partial
-from spec import DependencyMap
from itertools import chain
-from spack.config import *
+from spack.package_prefs import *
+
class DefaultConcretizer(object):
+
"""This class doesn't have any state, it just provides some methods for
concretization. You can subclass it to override just some of the
default concretization strategies, or you can override all of them.
@@ -60,14 +62,19 @@ class DefaultConcretizer(object):
if not providers:
raise UnsatisfiableProviderSpecError(providers[0], spec)
spec_w_preferred_providers = find_spec(
- spec, lambda(x): spack.pkgsort.spec_has_preferred_provider(x.name, spec.name))
+ spec,
+ lambda x: pkgsort().spec_has_preferred_provider(
+ x.name, spec.name))
if not spec_w_preferred_providers:
spec_w_preferred_providers = spec
- provider_cmp = partial(spack.pkgsort.provider_compare, spec_w_preferred_providers.name, spec.name)
+ provider_cmp = partial(pkgsort().provider_compare,
+ spec_w_preferred_providers.name,
+ spec.name)
candidates = sorted(providers, cmp=provider_cmp)
- # For each candidate package, if it has externals, add those to the usable list.
- # if it's not buildable, then *only* add the externals.
+ # For each candidate package, if it has externals, add those
+ # to the usable list. If it's not buildable, then *only* add
+ # the externals.
usable = []
for cspec in candidates:
if is_spec_buildable(cspec):
@@ -83,10 +90,15 @@ class DefaultConcretizer(object):
raise NoBuildError(spec)
def cmp_externals(a, b):
- if a.name != b.name:
+ if a.name != b.name and (not a.external or a.external_module and
+ not b.external and b.external_module):
# We're choosing between different providers, so
# maintain order from provider sort
- return candidates.index(a) - candidates.index(b)
+ index_of_a = next(i for i in range(0, len(candidates))
+ if a.satisfies(candidates[i]))
+ index_of_b = next(i for i in range(0, len(candidates))
+ if b.satisfies(candidates[i]))
+ return index_of_a - index_of_b
result = cmp_specs(a, b)
if result != 0:
@@ -101,7 +113,7 @@ class DefaultConcretizer(object):
usable.sort(cmp=cmp_externals)
return usable
-
+ # XXX(deptypes): Look here.
def choose_virtual_or_external(self, spec):
"""Given a list of candidate virtual and external packages, try to
find one that is most ABI compatible.
@@ -112,26 +124,26 @@ class DefaultConcretizer(object):
# Find the nearest spec in the dag that has a compiler. We'll
# use that spec to calibrate compiler compatibility.
- abi_exemplar = find_spec(spec, lambda(x): x.compiler)
+ abi_exemplar = find_spec(spec, lambda x: x.compiler)
if not abi_exemplar:
abi_exemplar = spec.root
# Make a list including ABI compatibility of specs with the exemplar.
strict = [spack.abi.compatible(c, abi_exemplar) for c in candidates]
- loose = [spack.abi.compatible(c, abi_exemplar, loose=True) for c in candidates]
+ loose = [spack.abi.compatible(c, abi_exemplar, loose=True)
+ for c in candidates]
keys = zip(strict, loose, candidates)
# Sort candidates from most to least compatibility.
# Note:
# 1. We reverse because True > False.
# 2. Sort is stable, so c's keep their order.
- keys.sort(key=lambda k:k[:2], reverse=True)
+ keys.sort(key=lambda k: k[:2], reverse=True)
# Pull the candidates back out and return them in order
- candidates = [c for s,l,c in keys]
+ candidates = [c for s, l, c in keys]
return candidates
-
def concretize_version(self, spec):
"""If the spec is already concrete, return. Otherwise take
the preferred version from spackconfig, and default to the package's
@@ -153,15 +165,56 @@ class DefaultConcretizer(object):
# If there are known available versions, return the most recent
# version that satisfies the spec
pkg = spec.package
- cmp_versions = partial(spack.pkgsort.version_compare, spec.name)
- valid_versions = sorted(
- [v for v in pkg.versions
- if any(v.satisfies(sv) for sv in spec.versions)],
- cmp=cmp_versions)
- def prefer_key(v):
- return pkg.versions.get(Version(v)).get('preferred', False)
- valid_versions.sort(key=prefer_key, reverse=True)
+ # ---------- Produce prioritized list of versions
+ # Get list of preferences from packages.yaml
+ preferred = pkgsort()
+ # NOTE: pkgsort() == spack.package_prefs.PreferredPackages()
+
+ yaml_specs = [
+ x[0] for x in
+ preferred._spec_for_pkgname(spec.name, 'version', None)]
+ n = len(yaml_specs)
+ yaml_index = dict(
+ [(spc, n - index) for index, spc in enumerate(yaml_specs)])
+
+ # List of versions we could consider, in sorted order
+ unsorted_versions = [
+ v for v in pkg.versions
+ if any(v.satisfies(sv) for sv in spec.versions)]
+
+ # The keys below show the order of precedence of factors used
+ # to select a version when concretizing. The item with
+ # the "largest" key will be selected.
+ #
+ # NOTE: When COMPARING VERSIONS, the '@develop' version is always
+ # larger than other versions. BUT when CONCRETIZING,
+ # the largest NON-develop version is selected by
+ # default.
+ keys = [(
+ # ------- Special direction from the user
+ # Respect order listed in packages.yaml
+ yaml_index.get(v, -1),
+
+ # The preferred=True flag (packages or packages.yaml or both?)
+ pkg.versions.get(Version(v)).get('preferred', False),
+
+ # ------- Regular case: use latest non-develop version by default.
+ # Avoid @develop version, which would otherwise be the "largest"
+ # in straight version comparisons
+ not v.isdevelop(),
+
+ # Compare the version itself
+ # This includes the logic:
+ # a) develop > everything (disabled by "not v.isdevelop()" above)
+ # b) numeric > non-numeric
+ # c) Numeric or string comparison
+ v) for v in unsorted_versions]
+ keys.sort(reverse=True)
+
+ # List of versions in complete sorted order
+ valid_versions = [x[-1] for x in keys]
+ # --------------------------
if valid_versions:
spec.versions = ver([valid_versions[0]])
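# A toy sketch, not part of the patch, of the key ordering used above when
# picking a version; strings stand in for Spack Version objects and the
# versions and packages.yaml preferences below are made up.
yaml_order = ['2.2', '1.9']  # order listed in packages.yaml
yaml_index = {v: len(yaml_order) - i for i, v in enumerate(yaml_order)}

versions = {                 # version -> metadata, as in pkg.versions
    'develop': {},
    '2.3': {},
    '2.2': {},
    '1.9': {'preferred': True},
}

keys = [(yaml_index.get(v, -1),                # packages.yaml order wins first
         versions[v].get('preferred', False),  # then the preferred flag
         v != 'develop',                       # stands in for "not v.isdevelop()"
         v)                                    # finally the version itself
        for v in versions]
keys.sort(reverse=True)
print(keys[0][-1])  # prints: 2.2 (listed first in packages.yaml)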
@@ -186,45 +239,55 @@ class DefaultConcretizer(object):
return True # Things changed
-
def concretize_architecture(self, spec):
- """If the spec already had an architecture, return. Otherwise if
- the root of the DAG has an architecture, then use that.
- Otherwise take the system's default architecture.
-
- Intuition: Architectures won't be set a lot, and generally you
- want the host system's architecture. When architectures are
- mised in a spec, it is likely because the tool requries a
- cross-compiled component, e.g. for tools that run on BlueGene
- or Cray machines. These constraints will likely come directly
- from packages, so require the user to be explicit if they want
- to mess with the architecture, and revert to the default when
- they're not explicit.
+ """If the spec is empty provide the defaults of the platform. If the
+ architecture is not a basestring, then check if either the platform,
+ target or operating system are concretized. If any of the fields are
+ changed then return True. If everything is concretized (i.e the
+ architecture attribute is a namedtuple of classes) then return False.
+ If the target is a string type, then convert the string into a
+ concretized architecture. If it has no architecture and the root of the
+ DAG has an architecture, then use the root otherwise use the defaults
+ on the platform.
"""
- if spec.architecture is not None:
- return False
+ root_arch = spec.root.architecture
+ sys_arch = spack.spec.ArchSpec(spack.architecture.sys_type())
+ spec_changed = False
- if spec.root.architecture:
- spec.architecture = spec.root.architecture
- else:
- spec.architecture = spack.architecture.sys_type()
+ if spec.architecture is None:
+ spec.architecture = spack.spec.ArchSpec(sys_arch)
+ spec_changed = True
+
+ default_archs = [root_arch, sys_arch]
+ while not spec.architecture.concrete and default_archs:
+ arch = default_archs.pop(0)
- assert(spec.architecture is not None)
- return True # changed
+ replacement_fields = [k for k, v in arch.to_cmp_dict().iteritems()
+ if v and not getattr(spec.architecture, k)]
+ for field in replacement_fields:
+ setattr(spec.architecture, field, getattr(arch, field))
+ spec_changed = True
+ return spec_changed
def concretize_variants(self, spec):
"""If the spec already has variants filled in, return. Otherwise, add
- the default variants from the package specification.
+ the user preferences from packages.yaml or the default variants from
+ the package specification.
"""
changed = False
+ preferred_variants = pkgsort().spec_preferred_variants(
+ spec.package_class.name)
for name, variant in spec.package_class.variants.items():
if name not in spec.variants:
- spec.variants[name] = spack.spec.VariantSpec(name, variant.default)
changed = True
+ if name in preferred_variants:
+ spec.variants[name] = preferred_variants.get(name)
+ else:
+ spec.variants[name] = \
+ spack.spec.VariantSpec(name, variant.default)
return changed
-
def concretize_compiler(self, spec):
"""If the spec already has a compiler, we're done. If not, then take
the compiler used for the nearest ancestor with a compiler
@@ -237,15 +300,32 @@ class DefaultConcretizer(object):
build with the compiler that will be used by libraries that
link to this one, to maximize compatibility.
"""
+ # Pass on concretizing the compiler if the target or operating system
+ # is not yet determined
+ if not (spec.architecture.platform_os and spec.architecture.target):
+ # We haven't changed, but other changes need to happen before we
+ # continue. `return True` here to force concretization to keep
+ # running.
+ return True
+
+ # Only use a matching compiler if it is of the proper style
+ # Takes advantage of the proper logic already existing in
+ # compiler_for_spec Should think whether this can be more
+ # efficient
+ def _proper_compiler_style(cspec, aspec):
+ return spack.compilers.compilers_for_spec(cspec, arch_spec=aspec)
+
all_compilers = spack.compilers.all_compilers()
if (spec.compiler and
spec.compiler.concrete and
- spec.compiler in all_compilers):
+ spec.compiler in all_compilers):
return False
- #Find the another spec that has a compiler, or the root if none do
- other_spec = spec if spec.compiler else find_spec(spec, lambda(x) : x.compiler)
+ # Find another spec that has a compiler, or the root if none do
+ other_spec = spec if spec.compiler else find_spec(
+ spec, lambda x: x.compiler)
+
if not other_spec:
other_spec = spec.root
other_compiler = other_spec.compiler
@@ -256,25 +336,114 @@ class DefaultConcretizer(object):
spec.compiler = other_compiler.copy()
return True
- # Filter the compilers into a sorted list based on the compiler_order from spackconfig
- compiler_list = all_compilers if not other_compiler else spack.compilers.find(other_compiler)
- cmp_compilers = partial(spack.pkgsort.compiler_compare, other_spec.name)
+ # Filter the compilers into a sorted list based on the compiler_order
+ # from spackconfig
+ compiler_list = all_compilers if not other_compiler else \
+ spack.compilers.find(other_compiler)
+ cmp_compilers = partial(
+ pkgsort().compiler_compare, other_spec.name)
matches = sorted(compiler_list, cmp=cmp_compilers)
if not matches:
- raise UnavailableCompilerVersionError(other_compiler)
+ arch = spec.architecture
+ raise UnavailableCompilerVersionError(other_compiler,
+ arch.platform_os,
+ arch.target)
# copy concrete version into other_compiler
- spec.compiler = matches[0].copy()
+ try:
+ spec.compiler = next(
+ c for c in matches
+ if _proper_compiler_style(c, spec.architecture)).copy()
+ except StopIteration:
+ raise UnavailableCompilerVersionError(
+ spec.compiler, spec.architecture.platform_os,
+ spec.architecture.target
+ )
+
assert(spec.compiler.concrete)
return True # things changed.
+ def concretize_compiler_flags(self, spec):
+ """
+ The compiler flags are updated to match those of the spec whose
+ compiler is used, defaulting to no compiler flags in the spec.
+ Default specs set at the compiler level will still be added later.
+ """
+ # Pass on concretizing the compiler flags if the target or operating
+ # system is not set.
+ if not (spec.architecture.platform_os and spec.architecture.target):
+ # We haven't changed, but other changes need to happen before we
+ # continue. `return True` here to force concretization to keep
+ # running.
+ return True
+
+ ret = False
+ for flag in spack.spec.FlagMap.valid_compiler_flags():
+ try:
+ nearest = next(p for p in spec.traverse(direction='parents')
+ if ((p.compiler == spec.compiler and
+ p is not spec) and
+ flag in p.compiler_flags))
+ if flag not in spec.compiler_flags or \
+ not (sorted(spec.compiler_flags[flag]) >=
+ sorted(nearest.compiler_flags[flag])):
+ if flag in spec.compiler_flags:
+ spec.compiler_flags[flag] = list(
+ set(spec.compiler_flags[flag]) |
+ set(nearest.compiler_flags[flag]))
+ else:
+ spec.compiler_flags[
+ flag] = nearest.compiler_flags[flag]
+ ret = True
+
+ except StopIteration:
+ if (flag in spec.root.compiler_flags and
+ ((flag not in spec.compiler_flags) or
+ sorted(spec.compiler_flags[flag]) !=
+ sorted(spec.root.compiler_flags[flag]))):
+ if flag in spec.compiler_flags:
+ spec.compiler_flags[flag] = list(
+ set(spec.compiler_flags[flag]) |
+ set(spec.root.compiler_flags[flag]))
+ else:
+ spec.compiler_flags[
+ flag] = spec.root.compiler_flags[flag]
+ ret = True
+ else:
+ if flag not in spec.compiler_flags:
+ spec.compiler_flags[flag] = []
+
+ # Include the compiler flag defaults from the config files
+ # This ensures that spack will detect conflicts that stem from a change
+ # in default compiler flags.
+ compiler = spack.compilers.compiler_for_spec(
+ spec.compiler, spec.architecture)
+ for flag in compiler.flags:
+ if flag not in spec.compiler_flags:
+ spec.compiler_flags[flag] = compiler.flags[flag]
+ if compiler.flags[flag] != []:
+ ret = True
+ else:
+ if ((sorted(spec.compiler_flags[flag]) !=
+ sorted(compiler.flags[flag])) and
+ (not set(spec.compiler_flags[flag]) >=
+ set(compiler.flags[flag]))):
+ ret = True
+ spec.compiler_flags[flag] = list(
+ set(spec.compiler_flags[flag]) |
+ set(compiler.flags[flag]))
+
+ return ret
+
def find_spec(spec, condition):
"""Searches the dag from spec in an intelligent order and looks
for a spec that matches a condition"""
# First search parents, then search children
- dagiter = chain(spec.traverse(direction='parents', root=False),
- spec.traverse(direction='children', root=False))
+ deptype = ('build', 'link')
+ dagiter = chain(
+ spec.traverse(direction='parents', deptype=deptype, root=False),
+ spec.traverse(direction='children', deptype=deptype, root=False))
visited = set()
for relative in dagiter:
if condition(relative):
@@ -282,9 +451,11 @@ def find_spec(spec, condition):
visited.add(id(relative))
# Then search all other relatives in the DAG *except* spec
- for relative in spec.root.traverse():
- if relative is spec: continue
- if id(relative) in visited: continue
+ for relative in spec.root.traverse(deptypes=spack.alldeps):
+ if relative is spec:
+ continue
+ if id(relative) in visited:
+ continue
if condition(relative):
return relative
@@ -295,62 +466,35 @@ def find_spec(spec, condition):
return None # Nothing matched the condition.
-def cmp_specs(lhs, rhs):
- # Package name sort order is not configurable, always goes alphabetical
- if lhs.name != rhs.name:
- return cmp(lhs.name, rhs.name)
-
- # Package version is second in compare order
- pkgname = lhs.name
- if lhs.versions != rhs.versions:
- return spack.pkgsort.version_compare(
- pkgname, lhs.versions, rhs.versions)
-
- # Compiler is third
- if lhs.compiler != rhs.compiler:
- return spack.pkgsort.compiler_compare(
- pkgname, lhs.compiler, rhs.compiler)
-
- # Variants
- if lhs.variants != rhs.variants:
- return spack.pkgsort.variant_compare(
- pkgname, lhs.variants, rhs.variants)
-
- # Architecture
- if lhs.architecture != rhs.architecture:
- return spack.pkgsort.architecture_compare(
- pkgname, lhs.architecture, rhs.architecture)
-
- # Dependency is not configurable
- lhash, rhash = hash(lhs), hash(rhs)
- if lhash != rhash:
- return -1 if lhash < rhash else 1
-
- # Equal specs
- return 0
-
-
-
class UnavailableCompilerVersionError(spack.error.SpackError):
+
"""Raised when there is no available compiler that satisfies a
compiler spec."""
- def __init__(self, compiler_spec):
+
+ def __init__(self, compiler_spec, operating_system, target):
super(UnavailableCompilerVersionError, self).__init__(
- "No available compiler version matches '%s'" % compiler_spec,
+ "No available compiler version matches '%s' on operating_system %s"
+ "for target %s"
+ % (compiler_spec, operating_system, target),
"Run 'spack compilers' to see available compiler Options.")
class NoValidVersionError(spack.error.SpackError):
+
"""Raised when there is no way to have a concrete version for a
particular spec."""
+
def __init__(self, spec):
super(NoValidVersionError, self).__init__(
- "There are no valid versions for %s that match '%s'" % (spec.name, spec.versions))
+ "There are no valid versions for %s that match '%s'"
+ % (spec.name, spec.versions))
class NoBuildError(spack.error.SpackError):
"""Raised when a package is configured with the buildable option False, but
no satisfactory external versions can be found"""
+
def __init__(self, spec):
- super(NoBuildError, self).__init__(
- "The spec '%s' is configured as not buildable, and no matching external installs were found" % spec.name)
+ msg = ("The spec '%s' is configured as not buildable, "
+ "and no matching external installs were found")
+ super(NoBuildError, self).__init__(msg % spec.name)
diff --git a/lib/spack/spack/config.py b/lib/spack/spack/config.py
index ec37bd290c..56c6421457 100644
--- a/lib/spack/spack/config.py
+++ b/lib/spack/spack/config.py
@@ -1,4 +1,3 @@
-# flake8: noqa
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -25,113 +24,48 @@
##############################################################################
"""This module implements Spack's configuration file handling.
-Configuration file scopes
-===============================
-
-When Spack runs, it pulls configuration data from several config
-directories, each of which contains configuration files. In Spack,
-there are two configuration scopes:
-
- 1. ``site``: Spack loads site-wide configuration options from
- ``$(prefix)/etc/spack/``.
-
- 2. ``user``: Spack next loads per-user configuration options from
- ~/.spack/.
-
-Spack may read configuration files from both of these locations. When
-configurations conflict, the user config options take precedence over
-the site configurations. Each configuration directory may contain
-several configuration files, such as compilers.yaml or mirrors.yaml.
-
-Configuration file format
-===============================
-
-Configuration files are formatted using YAML syntax. This format is
-implemented by libyaml (included with Spack as an external module),
-and it's easy to read and versatile.
-
-Config files are structured as trees, like this ``compiler`` section::
-
- compilers:
- chaos_5_x86_64_ib:
- gcc@4.4.7:
- cc: /usr/bin/gcc
- cxx: /usr/bin/g++
- f77: /usr/bin/gfortran
- fc: /usr/bin/gfortran
- bgqos_0:
- xlc@12.1:
- cc: /usr/local/bin/mpixlc
- ...
-
-In this example, entries like ''compilers'' and ''xlc@12.1'' are used to
-categorize entries beneath them in the tree. At the root of the tree,
-entries like ''cc'' and ''cxx'' are specified as name/value pairs.
-
-``config.get_config()`` returns these trees as nested dicts, but it
-strips the first level off. So, ``config.get_config('compilers')``
-would return something like this for the above example:
-
- { 'chaos_5_x86_64_ib' :
- { 'gcc@4.4.7' :
- { 'cc' : '/usr/bin/gcc',
- 'cxx' : '/usr/bin/g++'
- 'f77' : '/usr/bin/gfortran'
- 'fc' : '/usr/bin/gfortran' }
- }
- { 'bgqos_0' :
- { 'cc' : '/usr/local/bin/mpixlc' } }
-
-Likewise, the ``mirrors.yaml`` file's first line must be ``mirrors:``,
-but ``get_config()`` strips that off too.
-
-Precedence
-===============================
-
-``config.py`` routines attempt to recursively merge configuration
-across scopes. So if there are ``compilers.py`` files in both the
-site scope and the user scope, ``get_config('compilers')`` will return
-merged dictionaries of *all* the compilers available. If a user
-compiler conflicts with a site compiler, Spack will overwrite the site
-configuration wtih the user configuration. If both the user and site
-``mirrors.yaml`` files contain lists of mirrors, then ``get_config()``
-will return a concatenated list of mirrors, with the user config items
-first.
-
-Sometimes, it is useful to *completely* override a site setting with a
-user one. To accomplish this, you can use *two* colons at the end of
-a key in a configuration file. For example, this:
-
- compilers::
- chaos_5_x86_64_ib:
- gcc@4.4.7:
- cc: /usr/bin/gcc
- cxx: /usr/bin/g++
- f77: /usr/bin/gfortran
- fc: /usr/bin/gfortran
- bgqos_0:
- xlc@12.1:
- cc: /usr/local/bin/mpixlc
- ...
-
-Will make Spack take compilers *only* from the user configuration, and
-the site configuration will be ignored.
+This implements Spack's configuration system, which handles merging
+multiple scopes with different levels of precedence. See the
+documentation on :ref:`configuration-scopes` for details on how Spack's
+configuration system behaves. The scopes are:
+
+ #. ``default``
+ #. ``site``
+ #. ``user``
+
+And corresponding :ref:`per-platform scopes <platform-scopes>`. Important
+functions in this module are:
+
+* :py:func:`get_config`
+* :py:func:`update_config`
+
+``get_config`` reads in YAML data for a particular scope and returns
+it. Callers can then modify the data and write it back with
+``update_config``.
+
+When read in, Spack validates configurations with jsonschemas. The
+schemas are in submodules of :py:mod:`spack.schema`.
"""
+
import copy
import os
import re
import sys
-import jsonschema
-import llnl.util.tty as tty
-import spack
import yaml
+import jsonschema
+from yaml.error import MarkedYAMLError
from jsonschema import Draft4Validator, validators
-from llnl.util.filesystem import mkdirp
from ordereddict_backport import OrderedDict
+
+import llnl.util.tty as tty
+from llnl.util.filesystem import mkdirp
+
+import spack
+import spack.architecture
from spack.error import SpackError
-from yaml.error import MarkedYAMLError
+import spack.schema
# Hacked yaml for configuration files preserves line numbers.
import spack.util.spack_yaml as syaml
@@ -139,223 +73,12 @@ import spack.util.spack_yaml as syaml
"""Dict from section names -> schema for that section."""
section_schemas = {
- 'compilers': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack compiler configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- 'compilers:?': { # optional colon for overriding site config.
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': { # architecture
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*@\w[\w-]*': { # compiler spec
- 'type': 'object',
- 'additionalProperties': False,
- 'required': ['cc', 'cxx', 'f77', 'fc'],
- 'properties': {
- 'cc': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'cxx': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'f77': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- 'fc': { 'anyOf': [ {'type' : 'string' },
- {'type' : 'null' }]},
- },},},},},},},},
-
- 'mirrors': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack mirror configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'mirrors:?': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type': 'string'},},},},},
-
- 'repos': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack repository configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'repos:?': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'},},},},
-
- 'packages': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack package configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'patternProperties': {
- r'packages:?': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': { # package name
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'version': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'anyOf' : [ { 'type' : 'string' },
- { 'type' : 'number'}]}}, #version strings
- 'compiler': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'type' : 'string' } }, #compiler specs
- 'buildable': {
- 'type': 'boolean',
- 'default': True,
- },
- 'providers': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'patternProperties': {
- r'\w[\w-]*': {
- 'type' : 'array',
- 'default' : [],
- 'items' : { 'type' : 'string' },},},},
- 'paths': {
- 'type' : 'object',
- 'default' : {},
- }
- },},},},},},
-
- 'modules': {
- '$schema': 'http://json-schema.org/schema#',
- 'title': 'Spack module file configuration file schema',
- 'type': 'object',
- 'additionalProperties': False,
- 'definitions': {
- 'array_of_strings': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'
- }
- },
- 'dictionary_of_strings': {
- 'type': 'object',
- 'patternProperties': {
- r'\w[\w-]*': { # key
- 'type': 'string'
- }
- }
- },
- 'dependency_selection': {
- 'type': 'string',
- 'enum': ['none', 'direct', 'all']
- },
- 'module_file_configuration': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'filter': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'environment_blacklist': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string'
- }
- }
- }
- },
- 'autoload': {'$ref': '#/definitions/dependency_selection'},
- 'prerequisites': {'$ref': '#/definitions/dependency_selection'},
- 'conflict': {'$ref': '#/definitions/array_of_strings'},
- 'environment': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'set': {'$ref': '#/definitions/dictionary_of_strings'},
- 'unset': {'$ref': '#/definitions/array_of_strings'},
- 'prepend_path': {'$ref': '#/definitions/dictionary_of_strings'},
- 'append_path': {'$ref': '#/definitions/dictionary_of_strings'}
- }
- }
- }
- },
- 'module_type_configuration': {
- 'type': 'object',
- 'default': {},
- 'anyOf': [
- {
- 'properties': {
- 'whitelist': {'$ref': '#/definitions/array_of_strings'},
- 'blacklist': {'$ref': '#/definitions/array_of_strings'},
- 'naming_scheme': {
- 'type': 'string' # Can we be more specific here?
- }
- }
- },
- {
- 'patternProperties': {r'\w[\w-]*': {'$ref': '#/definitions/module_file_configuration'}}
- }
- ]
- }
- },
- 'patternProperties': {
- r'modules:?': {
- 'type': 'object',
- 'default': {},
- 'additionalProperties': False,
- 'properties': {
- 'prefix_inspections': {
- 'type': 'object',
- 'patternProperties': {
- r'\w[\w-]*': { # path to be inspected for existence (relative to prefix)
- '$ref': '#/definitions/array_of_strings'
- }
- }
- },
- 'enable': {
- 'type': 'array',
- 'default': [],
- 'items': {
- 'type': 'string',
- 'enum': ['tcl', 'dotkit']
- }
- },
- 'tcl': {
- 'allOf': [
- {'$ref': '#/definitions/module_type_configuration'}, # Base configuration
- {} # Specific tcl extensions
- ]
- },
- 'dotkit': {
- 'allOf': [
- {'$ref': '#/definitions/module_type_configuration'}, # Base configuration
- {} # Specific dotkit extensions
- ]
- },
- }
- },
- },
- },
+ 'compilers': spack.schema.compilers.schema,
+ 'mirrors': spack.schema.mirrors.schema,
+ 'repos': spack.schema.repos.schema,
+ 'packages': spack.schema.packages.schema,
+ 'modules': spack.schema.modules.schema,
+ 'config': spack.schema.config.schema,
}
"""OrderedDict of config scopes keyed by name.
@@ -372,7 +95,7 @@ def validate_section_name(section):
def extend_with_default(validator_class):
- """Add support for the 'default' attribute for properties and patternProperties.
+ """Add support for the 'default' attr for properties and patternProperties.
jsonschema does not handle this out of the box -- it only
validates. This allows us to set default values for configs
@@ -381,13 +104,15 @@ def extend_with_default(validator_class):
"""
validate_properties = validator_class.VALIDATORS["properties"]
- validate_pattern_properties = validator_class.VALIDATORS["patternProperties"]
+ validate_pattern_properties = validator_class.VALIDATORS[
+ "patternProperties"]
def set_defaults(validator, properties, instance, schema):
for property, subschema in properties.iteritems():
if "default" in subschema:
instance.setdefault(property, subschema["default"])
- for err in validate_properties(validator, properties, instance, schema):
+ for err in validate_properties(
+ validator, properties, instance, schema):
yield err
def set_pp_defaults(validator, properties, instance, schema):
@@ -398,7 +123,8 @@ def extend_with_default(validator_class):
if re.match(property, key) and val is None:
instance[key] = subschema["default"]
- for err in validate_pattern_properties(validator, properties, instance, schema):
+ for err in validate_pattern_properties(
+ validator, properties, instance, schema):
yield err
return validators.extend(validator_class, {
@@ -463,15 +189,41 @@ class ConfigScope(object):
except jsonschema.ValidationError as e:
raise ConfigSanityError(e, data)
except (yaml.YAMLError, IOError) as e:
- raise ConfigFileError("Error writing to config file: '%s'" % str(e))
+ raise ConfigFileError(
+ "Error writing to config file: '%s'" % str(e))
def clear(self):
"""Empty cached config information."""
self.sections = {}
+ def __repr__(self):
+ return '<ConfigScope: %s: %s>' % (self.name, self.path)
-ConfigScope('site', os.path.join(spack.etc_path, 'spack')),
-ConfigScope('user', os.path.expanduser('~/.spack'))
+
+#
+# Below are configuration scopes.
+#
+# Each scope can have per-platfom overrides in subdirectories of the
+# configuration directory.
+#
+_platform = spack.architecture.platform().name
+
+"""Default configuration scope is the lowest-level scope. These are
+ versioned with Spack and can be overridden by sites or users."""
+_defaults_path = os.path.join(spack.etc_path, 'spack', 'defaults')
+ConfigScope('defaults', _defaults_path)
+ConfigScope('defaults/%s' % _platform, os.path.join(_defaults_path, _platform))
+
+"""Site configuration is per spack instance, for sites or projects.
+ No site-level configs should be checked into spack by default."""
+_site_path = os.path.join(spack.etc_path, 'spack')
+ConfigScope('site', _site_path)
+ConfigScope('site/%s' % _platform, os.path.join(_site_path, _platform))
+
+"""User configuration can override both spack defaults and site config."""
+_user_path = spack.user_config_path
+ConfigScope('user', _user_path)
+ConfigScope('user/%s' % _platform, os.path.join(_user_path, _platform))
def highest_precedence_scope():
@@ -515,7 +267,7 @@ def _read_config_file(filename, schema):
try:
tty.debug("Reading config file %s" % filename)
with open(filename) as f:
- data = syaml.load(f)
+ data = _mark_overrides(syaml.load(f))
if data:
validate_section(data, schema)
@@ -537,6 +289,34 @@ def clear_config_caches():
scope.clear()
+def override(string):
+ """Test if a spack YAML string is an override.
+
+ See ``spack_yaml`` for details. Keys in Spack YAML can end in `::`,
+ and if they do, their values completely replace lower-precedence
+ configs instead of merging into them.
+
+ """
+ return hasattr(string, 'override') and string.override
+
+
+def _mark_overrides(data):
+ if isinstance(data, list):
+ return [_mark_overrides(elt) for elt in data]
+
+ elif isinstance(data, dict):
+ marked = {}
+ for key, val in data.iteritems():
+ if isinstance(key, basestring) and key.endswith(':'):
+ key = syaml.syaml_str(key[:-1])
+ key.override = True
+ marked[key] = _mark_overrides(val)
+ return marked
+
+ else:
+ return data
+
+
def _merge_yaml(dest, source):
"""Merges source into dest; entries in source take precedence over dest.
@@ -563,16 +343,17 @@ def _merge_yaml(dest, source):
# Source list is prepended (for precedence)
if they_are(list):
- seen = set(source)
- dest[:] = source + [x for x in dest if x not in seen]
+ dest[:] = source + [x for x in dest if x not in source]
return dest
# Source dict is merged into dest.
elif they_are(dict):
for sk, sv in source.iteritems():
- if sk not in dest:
+ if override(sk) or sk not in dest:
+ # if sk ended with ::, or if it's new, completely override
dest[sk] = copy.copy(sv)
else:
+ # otherwise, merge the YAML
dest[sk] = _merge_yaml(dest[sk], source[sk])
return dest
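# A small self-contained sketch, not part of the patch, of the precedence
# rules described above; ordinary dicts stand in for Spack's syaml types and
# the 'override' flag that _mark_overrides() sets is faked with a str subclass.
import copy

class OverrideKey(str):
    override = True  # what a 'key::' in YAML is marked as

def toy_merge(dest, source):
    """Source wins; lists are prepended; '::' keys replace outright."""
    if isinstance(dest, list) and isinstance(source, list):
        return source + [x for x in dest if x not in source]
    if isinstance(dest, dict) and isinstance(source, dict):
        for key, val in source.items():
            if getattr(key, 'override', False) or key not in dest:
                dest[key] = val  # brand-new key, or '::' override
            else:
                dest[key] = toy_merge(dest[key], val)
        return dest
    return source  # scalars or mismatched types: source takes precedence

site = {'packages': {'mpich': {'buildable': True}}}
user = {'packages': {'mpich': {'version': ['3.2']}}}
print(toy_merge(copy.deepcopy(site), user))
# {'packages': {'mpich': {'buildable': True, 'version': ['3.2']}}}
print(toy_merge(copy.deepcopy(site), {OverrideKey('packages'): {'mpich': {}}}))
# {'packages': {'mpich': {}}}   ('packages::' replaced the site entry)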
@@ -584,7 +365,26 @@ def _merge_yaml(dest, source):
def get_config(section, scope=None):
"""Get configuration settings for a section.
- Strips off the top-level section name from the YAML dict.
+ If ``scope`` is ``None`` or not provided, return the merged contents
+ of all of Spack's configuration scopes. If ``scope`` is provided,
+ return only the configuration as specified in that scope.
+
+ This strips off the top-level name from the YAML section. That is, for a
+ YAML config file that looks like this::
+
+ config:
+ install_tree: $spack/opt/spack
+ module_roots:
+ lmod: $spack/share/spack/lmod
+
+ ``get_config('config')`` will return::
+
+ { 'install_tree': '$spack/opt/spack',
+ 'module_roots': {
+ 'lmod': '$spack/share/spack/lmod'
+ }
+ }
+
"""
validate_section_name(section)
merged_section = syaml.syaml_dict()
@@ -602,18 +402,18 @@ def get_config(section, scope=None):
if not data or not isinstance(data, dict):
continue
- # Allow complete override of site config with '<section>::'
- override_key = section + ':'
- if not (section in data or override_key in data):
+ if section not in data:
tty.warn("Skipping bad configuration file: '%s'" % scope.path)
continue
- if override_key in data:
- merged_section = data[override_key]
- else:
- merged_section = _merge_yaml(merged_section, data[section])
+ merged_section = _merge_yaml(merged_section, data)
+
+ # no config files -- empty config.
+ if section not in merged_section:
+ return {}
- return merged_section
+ # take the top key off before returning.
+ return merged_section[section]
def get_config_filename(scope, section):
@@ -655,42 +455,10 @@ def print_section(section):
data = syaml.syaml_dict()
data[section] = get_config(section)
syaml.dump(data, stream=sys.stdout, default_flow_style=False)
- except (yaml.YAMLError, IOError) as e:
+ except (yaml.YAMLError, IOError):
raise ConfigError("Error reading configuration: %s" % section)
-def spec_externals(spec):
- """Return a list of external specs (with external directory path filled in),
- one for each known external installation."""
- allpkgs = get_config('packages')
- name = spec.name
-
- external_specs = []
- pkg_paths = allpkgs.get(name, {}).get('paths', None)
- if not pkg_paths:
- return []
-
- for external_spec, path in pkg_paths.iteritems():
- if not path:
- # skip entries without paths (avoid creating extra Specs)
- continue
-
- external_spec = spack.spec.Spec(external_spec, external=path)
- if external_spec.satisfies(spec):
- external_specs.append(external_spec)
- return external_specs
-
-
-def is_spec_buildable(spec):
- """Return true if the spec pkgspec is configured as buildable"""
- allpkgs = get_config('packages')
- if spec.name not in allpkgs:
- return True
- if 'buildable' not in allpkgs[spec.name]:
- return True
- return allpkgs[spec.name]['buildable']
-
-
class ConfigError(SpackError):
pass
@@ -708,6 +476,7 @@ def get_path(path, data):
class ConfigFormatError(ConfigError):
"""Raised when a configuration format does not match its schema."""
+
def __init__(self, validation_error, data):
# Try to get line number from erroneous instance and its parent
instance_mark = getattr(validation_error.instance, '_start_mark', None)
diff --git a/lib/spack/spack/database.py b/lib/spack/spack/database.py
index 73bc8143c8..ff55223351 100644
--- a/lib/spack/spack/database.py
+++ b/lib/spack/spack/database.py
@@ -33,46 +33,53 @@ The database serves two purposes:
2. It will allow us to track external installations as well as lost
packages and their dependencies.
-Prior ot the implementation of this store, a direcotry layout served
+Prior to the implementation of this store, a directory layout served
as the authoritative database of packages in Spack. This module
provides a cache and a sanity checking mechanism for what is in the
filesystem.
"""
import os
-import time
import socket
-import yaml
from yaml.error import MarkedYAMLError, YAMLError
import llnl.util.tty as tty
from llnl.util.filesystem import *
from llnl.util.lock import *
-import spack.spec
+import spack.store
+import spack.repository
+from spack.directory_layout import DirectoryLayoutError
from spack.version import Version
-from spack.spec import Spec
+import spack.spec
from spack.error import SpackError
-from spack.repository import UnknownPackageError
+import spack.util.spack_yaml as syaml
+import spack.util.spack_json as sjson
+
# DB goes in this directory underneath the root
_db_dirname = '.spack-db'
# DB version. This is stuck in the DB file to track changes in format.
-_db_version = Version('0.9')
+_db_version = Version('0.9.2')
# Default timeout for spack database locks is 5 min.
_db_lock_timeout = 60
+# Types of dependencies tracked by the database
+_tracked_deps = ('link', 'run')
+
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
function to a Spec."""
+
def converter(self, spec_like, *args, **kwargs):
if not isinstance(spec_like, spack.spec.Spec):
spec_like = spack.spec.Spec(spec_like)
return function(self, spec_like, *args, **kwargs)
+
return converter
@@ -92,6 +99,7 @@ class InstallRecord(object):
dependents left.
"""
+
def __init__(self, spec, path, installed, ref_count=0, explicit=False):
self.spec = spec
self.path = str(path)
@@ -100,19 +108,23 @@ class InstallRecord(object):
self.explicit = explicit
def to_dict(self):
- return { 'spec' : self.spec.to_node_dict(),
- 'path' : self.path,
- 'installed' : self.installed,
- 'ref_count' : self.ref_count,
- 'explicit' : self.explicit }
+ return {
+ 'spec': self.spec.to_node_dict(),
+ 'path': self.path,
+ 'installed': self.installed,
+ 'ref_count': self.ref_count,
+ 'explicit': self.explicit
+ }
@classmethod
def from_dict(cls, spec, dictionary):
d = dictionary
- return InstallRecord(spec, d['path'], d['installed'], d['ref_count'], d.get('explicit', False))
+ return InstallRecord(spec, d['path'], d['installed'], d['ref_count'],
+ d.get('explicit', False))
class Database(object):
+
def __init__(self, root, db_dir=None):
"""Create a Database for Spack installations under ``root``.
@@ -123,10 +135,12 @@ class Database(object):
under ``root/.spack-db``, which is created if it does not
exist. This is the ``db_dir``.
- The Database will attempt to read an ``index.yaml`` file in
- ``db_dir``. If it does not find one, it will be created when
- needed by scanning the entire Database root for ``spec.yaml``
- files according to Spack's ``DirectoryLayout``.
+ The Database will attempt to read an ``index.json`` file in
+ ``db_dir``. If it does not find one, it will fall back to read
+ an ``index.yaml`` if one is present. If that does not exist, it
+ will create a database when needed by scanning the entire
+ Database root for ``spec.yaml`` files according to Spack's
+ ``DirectoryLayout``.
Caller may optionally provide a custom ``db_dir`` parameter
where data will be stored. This is intended to be used for
@@ -143,33 +157,34 @@ class Database(object):
self._db_dir = db_dir
# Set up layout of database files within the db dir
- self._index_path = join_path(self._db_dir, 'index.yaml')
- self._lock_path = join_path(self._db_dir, 'lock')
+ self._old_yaml_index_path = join_path(self._db_dir, 'index.yaml')
+ self._index_path = join_path(self._db_dir, 'index.json')
+ self._lock_path = join_path(self._db_dir, 'lock')
+
+ # This is for other classes to use to lock prefix directories.
+ self.prefix_lock_path = join_path(self._db_dir, 'prefix_lock')
# Create needed directories and files
if not os.path.exists(self._db_dir):
mkdirp(self._db_dir)
- if not os.path.exists(self._lock_path):
- touch(self._lock_path)
-
# initialize rest of state.
self.lock = Lock(self._lock_path)
self._data = {}
+ # whether there was an error at the start of a read transaction
+ self._error = None
def write_transaction(self, timeout=_db_lock_timeout):
"""Get a write lock context manager for use in a `with` block."""
- return WriteTransaction(self, self._read, self._write, timeout)
-
+ return WriteTransaction(self.lock, self._read, self._write, timeout)
def read_transaction(self, timeout=_db_lock_timeout):
"""Get a read lock context manager for use in a `with` block."""
- return ReadTransaction(self, self._read, None, timeout)
-
+ return ReadTransaction(self.lock, self._read, timeout=timeout)
- def _write_to_yaml(self, stream):
- """Write out the databsae to a YAML file.
+ def _write_to_file(self, stream):
+ """Write out the databsae to a JSON file.
This function does not do any locking or transactions.
"""
@@ -183,125 +198,192 @@ class Database(object):
# different paths, it can't differentiate.
# TODO: fix this before we support multiple install locations.
database = {
- 'database' : {
- 'installs' : installs,
- 'version' : str(_db_version)
+ 'database': {
+ 'installs': installs,
+ 'version': str(_db_version)
}
}
try:
- return yaml.dump(database, stream=stream, default_flow_style=False)
+ sjson.dump(database, stream)
except YAMLError as e:
- raise SpackYAMLError("error writing YAML database:", str(e))
-
+ raise syaml.SpackYAMLError(
+ "error writing YAML database:", str(e))
- def _read_spec_from_yaml(self, hash_key, installs, parent_key=None):
+ def _read_spec_from_dict(self, hash_key, installs):
"""Recursively construct a spec from a hash in a YAML database.
Does not do any locking.
"""
- if hash_key not in installs:
- parent = read_spec(installs[parent_key]['path'])
-
spec_dict = installs[hash_key]['spec']
+ # Install records don't include hash with spec, so we add it in here
+ # to ensure it is read properly.
+ for name in spec_dict:
+ spec_dict[name]['hash'] = hash_key
+
# Build spec from dict first.
- spec = Spec.from_node_dict(spec_dict)
+ spec = spack.spec.Spec.from_node_dict(spec_dict)
+ return spec
+ def _assign_dependencies(self, hash_key, installs, data):
# Add dependencies from other records in the install DB to
# form a full spec.
- for dep_hash in spec_dict[spec.name]['dependencies'].values():
- child = self._read_spec_from_yaml(dep_hash, installs, hash_key)
- spec._add_dependency(child)
+ spec = data[hash_key].spec
+ spec_dict = installs[hash_key]['spec']
- # Specs from the database need to be marked concrete because
- # they represent actual installations.
- spec._mark_concrete()
- return spec
+ if 'dependencies' in spec_dict[spec.name]:
+ yaml_deps = spec_dict[spec.name]['dependencies']
+ for dname, dhash, dtypes in spack.spec.Spec.read_yaml_dep_specs(
+ yaml_deps):
+ if dhash not in data:
+ tty.warn("Missing dependency not in database: ",
+ "%s needs %s-%s" % (
+ spec.format('$_$#'), dname, dhash[:7]))
+ continue
+ child = data[dhash].spec
+ spec._add_dependency(child, dtypes)
- def _read_from_yaml(self, stream):
+ def _read_from_file(self, stream, format='json'):
"""
- Fill database from YAML, do not maintain old data
+ Fill database from file, do not maintain old data
Translate the spec portions from node-dict form to spec form
Does not do any locking.
"""
+ if format.lower() == 'json':
+ load = sjson.load
+ elif format.lower() == 'yaml':
+ load = syaml.load
+ else:
+ raise ValueError("Invalid database format: %s" % format)
+
try:
if isinstance(stream, basestring):
with open(stream, 'r') as f:
- yfile = yaml.load(f)
+ fdata = load(f)
else:
- yfile = yaml.load(stream)
-
+ fdata = load(stream)
except MarkedYAMLError as e:
- raise SpackYAMLError("error parsing YAML database:", str(e))
+ raise syaml.SpackYAMLError("error parsing YAML database:", str(e))
+ except Exception as e:
+ raise CorruptDatabaseError("error parsing database:", str(e))
- if yfile is None:
+ if fdata is None:
return
def check(cond, msg):
- if not cond: raise CorruptDatabaseError(self._index_path, msg)
+ if not cond:
+ raise CorruptDatabaseError(
+ "Spack database is corrupt: %s" % msg, self._index_path)
- check('database' in yfile, "No 'database' attribute in YAML.")
+ check('database' in fdata, "No 'database' attribute in YAML.")
# High-level file checks
- db = yfile['database']
+ db = fdata['database']
check('installs' in db, "No 'installs' in YAML DB.")
- check('version' in db, "No 'version' in YAML DB.")
+ check('version' in db, "No 'version' in YAML DB.")
+
+ installs = db['installs']
# TODO: better version checking semantics.
version = Version(db['version'])
- if version != _db_version:
+ if version > _db_version:
raise InvalidDatabaseVersionError(_db_version, version)
-
- # Iterate through database and check each record.
- installs = db['installs']
+ elif version < _db_version:
+ self.reindex(spack.store.layout)
+ installs = dict((k, v.to_dict()) for k, v in self._data.items())
+
+ def invalid_record(hash_key, error):
+ msg = ("Invalid record in Spack database: "
+ "hash: %s, cause: %s: %s")
+ msg %= (hash_key, type(error).__name__, str(error))
+ raise CorruptDatabaseError(msg, self._index_path)
+
+ # Build up the database in three passes:
+ #
+ # 1. Read in all specs without dependencies.
+ # 2. Hook dependencies up among specs.
+ # 3. Mark all specs concrete.
+ #
+ # The database is built up so that ALL specs in it share nodes
+ # (i.e., its specs are a true Merkle DAG, unlike most specs.)
+
+ # Pass 1: Iterate through database and build specs w/o dependencies
data = {}
for hash_key, rec in installs.items():
try:
# This constructs a spec DAG from the list of all installs
- spec = self._read_spec_from_yaml(hash_key, installs)
-
- # Validate the spec by ensuring the stored and actual
- # hashes are the same.
- spec_hash = spec.dag_hash()
- if not spec_hash == hash_key:
- tty.warn("Hash mismatch in database: %s -> spec with hash %s"
- % (hash_key, spec_hash))
- continue # TODO: is skipping the right thing to do?
+ spec = self._read_spec_from_dict(hash_key, installs)
# Insert the brand new spec in the database. Each
# spec has its own copies of its dependency specs.
- # TODO: would a more immmutable spec implementation simplify this?
+ # TODO: would a more immmutable spec implementation simplify
+ # this?
data[hash_key] = InstallRecord.from_dict(spec, rec)
except Exception as e:
- tty.warn("Invalid database reecord:",
- "file: %s" % self._index_path,
- "hash: %s" % hash_key,
- "cause: %s" % str(e))
- raise
+ invalid_record(hash_key, e)
- self._data = data
+ # Pass 2: Assign dependencies once all specs are created.
+ for hash_key in data:
+ try:
+ self._assign_dependencies(hash_key, installs, data)
+ except Exception as e:
+ invalid_record(hash_key, e)
+
+ # Pass 3: Mark all specs concrete. Specs representing real
+ # installations must be explicitly marked.
+ # We do this *after* all dependencies are connected because if we
+ # do it *while* we're constructing specs, it causes hashes to be
+ # cached prematurely.
+ for hash_key, rec in data.items():
+ rec.spec._mark_concrete()
+ self._data = data
def reindex(self, directory_layout):
- """Build database index from scratch based from a directory layout.
+ """Build database index from scratch based on a directory layout.
Locks the DB if it isn't locked already.
"""
- with self.write_transaction():
+ # Special transaction to avoid recursive reindex calls and to
+ # ignore errors if we need to rebuild a corrupt database.
+ def _read_suppress_error():
+ try:
+ if os.path.isfile(self._index_path):
+ self._read_from_file(self._index_path)
+ except CorruptDatabaseError as e:
+ self._error = e
+ self._data = {}
+
+ transaction = WriteTransaction(
+ self.lock, _read_suppress_error, self._write, _db_lock_timeout)
+
+ with transaction:
+ if self._error:
+ tty.warn(
+ "Spack database was corrupt. Will rebuild. Error was:",
+ str(self._error))
+ self._error = None
+
old_data = self._data
try:
self._data = {}
# Ask the directory layout to traverse the filesystem.
for spec in directory_layout.all_specs():
- # Create a spec for each known package and add it.
- path = directory_layout.path_for_spec(spec)
- self._add(spec, path, directory_layout)
+ # Try to recover explicit value from old DB, but
+ # default it to False if DB was corrupt.
+ explicit = False
+ if old_data is not None:
+ old_info = old_data.get(spec.dag_hash())
+ if old_info is not None:
+ explicit = old_info.explicit
+
+ self._add(spec, directory_layout, explicit=explicit)
self._check_ref_counts()
@@ -310,7 +392,6 @@ class Database(object):
self._data = old_data
raise
-
def _check_ref_counts(self):
"""Ensure consistency of reference counts in the DB.
@@ -321,7 +402,7 @@ class Database(object):
counts = {}
for key, rec in self._data.items():
counts.setdefault(key, 0)
- for dep in rec.spec.dependencies.values():
+ for dep in rec.spec.dependencies(_tracked_deps):
dep_key = dep.dag_hash()
counts.setdefault(dep_key, 0)
counts[dep_key] += 1
@@ -332,23 +413,32 @@ class Database(object):
found = rec.ref_count
if not expected == found:
raise AssertionError(
- "Invalid ref_count: %s: %d (expected %d), in DB %s"
- % (key, found, expected, self._index_path))
-
+ "Invalid ref_count: %s: %d (expected %d), in DB %s" %
+ (key, found, expected, self._index_path))
- def _write(self):
+ def _write(self, type, value, traceback):
"""Write the in-memory database index to its file path.
- Does no locking.
+ This is a helper function called by the WriteTransaction context
+ manager. If there is an exception while the write lock is active,
+ nothing will be written to the database file, but the in-memory
+ database *may* be left in an inconsistent state. It will be consistent
+ after the start of the next transaction, when it is read from disk again.
+
+ This routine does no locking.
"""
+ # Do not write if exceptions were raised
+ if type is not None:
+ return
+
temp_file = self._index_path + (
'.%s.%s.temp' % (socket.getfqdn(), os.getpid()))
# Write a temporary database file them move it into place
try:
with open(temp_file, 'w') as f:
- self._write_to_yaml(f)
+ self._write_to_file(f)
os.rename(temp_file, self._index_path)
except:
# Clean up temp file if something goes wrong.
@@ -356,68 +446,89 @@ class Database(object):
os.remove(temp_file)
raise
-
def _read(self):
"""Re-read Database from the data in the set location.
- This does no locking.
+ This does no locking, with one exception: it will automatically
+ migrate an index.yaml to an index.json if possible. This requires
+ taking a write lock.
+
"""
if os.path.isfile(self._index_path):
- # Read from YAML file if a database exists
- self._read_from_yaml(self._index_path)
+ # Read from JSON file if a JSON database exists
+ self._read_from_file(self._index_path, format='json')
+
+ elif os.path.isfile(self._old_yaml_index_path):
+ if os.access(self._db_dir, os.R_OK | os.W_OK):
+ # if we can write, then read AND write a JSON file.
+ self._read_from_file(self._old_yaml_index_path, format='yaml')
+ with WriteTransaction(self.lock, timeout=_db_lock_timeout):
+ self._write(None, None, None)
+ else:
+ # Check for a YAML file if we can't find JSON.
+ self._read_from_file(self._old_yaml_index_path, format='yaml')
else:
# The file doesn't exist, try to traverse the directory.
# reindex() takes its own write lock, so no lock here.
- self.reindex(spack.install_layout)
+ self.reindex(spack.store.layout)
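The branch order in ``_read`` above amounts to a small decision table: prefer the JSON index, migrate a legacy YAML index when the database directory is writable, fall back to read-only YAML otherwise, and reindex from the install tree when no index exists at all. A minimal, self-contained sketch of that decision (hypothetical helper, not part of the patch):

    import os

    def choose_index_action(db_dir, json_index, yaml_index):
        # Mirrors the branch order of Database._read() above.
        if os.path.isfile(json_index):
            return 'read json index'
        if os.path.isfile(yaml_index):
            if os.access(db_dir, os.R_OK | os.W_OK):
                # Read the legacy YAML index, then rewrite it as JSON
                # under a write lock (the migration path above).
                return 'read yaml index and migrate to json'
            # Read-only database directory: use YAML without migrating.
            return 'read yaml index'
        # No index at all: rebuild it by traversing the install tree.
        return 'reindex'
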
+ def _add(self, spec, directory_layout=None, explicit=False):
+ """Add an install record for this spec to the database.
- def _add(self, spec, path, directory_layout=None, explicit=False):
- """Add an install record for spec at path to the database.
+ Assumes spec is installed in ``layout.path_for_spec(spec)``.
- This assumes that the spec is not already installed. It
- updates the ref counts on dependencies of the spec in the DB.
-
- This operation is in-memory, and does not lock the DB.
+ Also ensures dependencies are present and updated in the DB as
+ either installed or missing.
"""
- key = spec.dag_hash()
- if key in self._data:
- rec = self._data[key]
- rec.installed = True
+ if not spec.concrete:
+ raise NonConcreteSpecAddError(
+ "Specs added to DB must be concrete.")
- # TODO: this overwrites a previous install path (when path !=
- # self._data[key].path), and the old path still has a
- # dependent in the DB. We could consider re-RPATH-ing the
- # dependents. This case is probably infrequent and may not be
- # worth fixing, but this is where we can discover it.
- rec.path = path
+ for dep in spec.dependencies(_tracked_deps):
+ dkey = dep.dag_hash()
+ if dkey not in self._data:
+ self._add(dep, directory_layout, explicit=False)
- else:
- self._data[key] = InstallRecord(spec, path, True, explicit=explicit)
- for dep in spec.dependencies.values():
- self._increment_ref_count(dep, directory_layout)
-
-
- def _increment_ref_count(self, spec, directory_layout=None):
- """Recursively examine dependencies and update their DB entries."""
key = spec.dag_hash()
if key not in self._data:
installed = False
path = None
- if directory_layout:
+ if not spec.external and directory_layout:
path = directory_layout.path_for_spec(spec)
- installed = os.path.isdir(path)
+ try:
+ directory_layout.check_installed(spec)
+ installed = True
+ except DirectoryLayoutError as e:
+ tty.warn(
+ 'Dependency missing due to corrupt install directory:',
+ path, str(e))
+
+ # Create a new install record with no deps initially.
+ new_spec = spec.copy(deps=False)
+ self._data[key] = InstallRecord(
+ new_spec, path, installed, ref_count=0, explicit=explicit)
+
+ # Connect dependencies from the DB to the new copy.
+ for name, dep in spec.dependencies_dict(_tracked_deps).iteritems():
+ dkey = dep.spec.dag_hash()
+ new_spec._add_dependency(self._data[dkey].spec, dep.deptypes)
+ self._data[dkey].ref_count += 1
+
+ # Mark concrete once everything is built, and preserve
+ # the original hash of concrete specs.
+ new_spec._mark_concrete()
+ new_spec._hash = key
- self._data[key] = InstallRecord(spec.copy(), path, installed)
-
- for dep in spec.dependencies.values():
- self._increment_ref_count(dep)
+ else:
+ # If it is already there, mark it as installed.
+ self._data[key].installed = True
- self._data[key].ref_count += 1
+ self._data[key].explicit = explicit
@_autospec
- def add(self, spec, path, explicit=False):
+ def add(self, spec, directory_layout, explicit=False):
"""Add spec at path to database, locking and reading DB to sync.
``add()`` will lock and read from the DB on disk.
@@ -426,30 +537,27 @@ class Database(object):
# TODO: ensure that spec is concrete?
# Entire add is transactional.
with self.write_transaction():
- self._add(spec, path, explicit=explicit)
-
+ self._add(spec, directory_layout, explicit=explicit)
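A hedged usage sketch of the new ``add()`` signature, which now takes the directory layout instead of a path; it assumes this refactor exposes the store as ``spack.store.db`` and ``spack.store.layout`` (as referenced in ``_read`` above) and that the spec has already been concretized:

    import spack.spec
    import spack.store

    spec = spack.spec.Spec('zlib')
    spec.concretize()
    # Locks, syncs with the on-disk index, records the spec and its
    # dependencies, and marks this install as explicitly requested.
    spack.store.db.add(spec, spack.store.layout, explicit=True)
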
def _get_matching_spec_key(self, spec, **kwargs):
"""Get the exact spec OR get a single spec that matches."""
key = spec.dag_hash()
- if not key in self._data:
+ if key not in self._data:
match = self.query_one(spec, **kwargs)
if match:
return match.dag_hash()
raise KeyError("No such spec in database! %s" % spec)
return key
-
@_autospec
def get_record(self, spec, **kwargs):
key = self._get_matching_spec_key(spec, **kwargs)
return self._data[key]
-
def _decrement_ref_count(self, spec):
key = spec.dag_hash()
- if not key in self._data:
+ if key not in self._data:
# TODO: print something here? DB is corrupt, but
# not much we can do.
return
@@ -459,10 +567,9 @@ class Database(object):
if rec.ref_count == 0 and not rec.installed:
del self._data[key]
- for dep in spec.dependencies.values():
+ for dep in spec.dependencies(_tracked_deps):
self._decrement_ref_count(dep)
-
def _remove(self, spec):
"""Non-locking version of remove(); does real work.
"""
@@ -474,14 +581,13 @@ class Database(object):
return rec.spec
del self._data[key]
- for dep in rec.spec.dependencies.values():
+ for dep in rec.spec.dependencies(_tracked_deps):
self._decrement_ref_count(dep)
# Returns the concrete spec so we know it in the case where a
# query spec was passed in.
return rec.spec
-
@_autospec
def remove(self, spec):
"""Removes a spec from the database. To be called on uninstall.
@@ -498,6 +604,14 @@ class Database(object):
with self.write_transaction():
return self._remove(spec)
+ @_autospec
+ def installed_dependents(self, spec):
+ """List the installed specs that depend on this one."""
+ dependents = set()
+ for spec in self.query(spec):
+ for dependent in spec.traverse(direction='parents', root=False):
+ dependents.add(dependent)
+ return dependents
@_autospec
def installed_extensions_for(self, extendee_spec):
@@ -509,12 +623,11 @@ class Database(object):
try:
if s.package.extends(extendee_spec):
yield s.package
- except UnknownPackageError as e:
+ except spack.repository.UnknownPackageError:
continue
# skips unknown packages
# TODO: conditional way to do this instead of catching exceptions
-
def query(self, query_spec=any, known=any, installed=True, explicit=any):
"""Run a query on the database.
@@ -551,20 +664,32 @@ class Database(object):
"""
with self.read_transaction():
+ # Just look up concrete specs with hashes; no fancy search.
+ if (isinstance(query_spec, spack.spec.Spec) and
+ query_spec._concrete):
+
+ hash_key = query_spec.dag_hash()
+ if hash_key in self._data:
+ return [self._data[hash_key].spec]
+ else:
+ return []
+
+ # Abstract specs require more work -- currently we test
+ # against everything.
results = []
for key, rec in self._data.items():
if installed is not any and rec.installed != installed:
continue
if explicit is not any and rec.explicit != explicit:
continue
- if known is not any and spack.repo.exists(rec.spec.name) != known:
+ if known is not any and spack.repo.exists(
+ rec.spec.name) != known:
continue
if query_spec is any or rec.spec.satisfies(query_spec):
results.append(rec.spec)
return sorted(results)
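Hedged examples of the query paths above: a concrete spec takes the hash lookup fast path, while anything else is matched record by record with ``satisfies()`` and the ``installed``/``explicit``/``known`` filters (``spack.store.db`` is assumed as above):

    import spack.store
    from spack.spec import Spec

    db = spack.store.db
    everything = db.query()                  # all installed specs
    explicit   = db.query(explicit=True)     # only user-requested installs
    missing    = db.query(installed=False)   # known to the DB but not installed
    zlibs      = db.query(Spec('zlib'))      # abstract spec: satisfies() scan
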
-
def query_one(self, query_spec, known=any, installed=True):
"""Query for exactly one spec that matches the query spec.
@@ -576,61 +701,25 @@ class Database(object):
assert len(concrete_specs) <= 1
return concrete_specs[0] if concrete_specs else None
-
def missing(self, spec):
with self.read_transaction():
- key = spec.dag_hash()
+ key = spec.dag_hash()
return key in self._data and not self._data[key].installed
-class _Transaction(object):
- """Simple nested transaction context manager that uses a file lock.
-
- This class can trigger actions when the lock is acquired for the
- first time and released for the last.
-
- Timeout for lock is customizable.
- """
- def __init__(self, db, acquire_fn=None, release_fn=None,
- timeout=_db_lock_timeout):
- self._db = db
- self._timeout = timeout
- self._acquire_fn = acquire_fn
- self._release_fn = release_fn
-
- def __enter__(self):
- if self._enter() and self._acquire_fn:
- self._acquire_fn()
-
- def __exit__(self, type, value, traceback):
- if self._exit() and self._release_fn:
- self._release_fn()
-
-
-class ReadTransaction(_Transaction):
- def _enter(self):
- return self._db.lock.acquire_read(self._timeout)
-
- def _exit(self):
- return self._db.lock.release_read()
-
-
-class WriteTransaction(_Transaction):
- def _enter(self):
- return self._db.lock.acquire_write(self._timeout)
+class CorruptDatabaseError(SpackError):
+ """Raised when errors are found while reading the database."""
- def _exit(self):
- return self._db.lock.release_write()
-
-class CorruptDatabaseError(SpackError):
- def __init__(self, path, msg=''):
- super(CorruptDatabaseError, self).__init__(
- "Spack database is corrupt: %s. %s" %(path, msg))
+class NonConcreteSpecAddError(SpackError):
+ """Raised when attemptint to add non-concrete spec to DB."""
class InvalidDatabaseVersionError(SpackError):
+
def __init__(self, expected, found):
super(InvalidDatabaseVersionError, self).__init__(
- "Expected database version %s but found version %s"
- % (expected, found))
+ "Expected database version %s but found version %s."
+ % (expected, found),
+ "`spack reindex` may fix this, or you may need a newer "
+ "Spack version.")
diff --git a/lib/spack/spack/directives.py b/lib/spack/spack/directives.py
index 51b26773e2..58eabb9e3b 100644
--- a/lib/spack/spack/directives.py
+++ b/lib/spack/spack/directives.py
@@ -46,132 +46,184 @@ The available directives are:
"""
-import re
-import os.path
+import collections
import functools
+import inspect
+import os.path
+import re
-from llnl.util.lang import *
-from llnl.util.filesystem import join_path
-
+import llnl.util.lang
import spack
-import spack.spec
import spack.error
+import spack.spec
import spack.url
-from spack.version import Version
+from llnl.util.filesystem import join_path
+from spack.fetch_strategy import from_kwargs
from spack.patch import Patch
-from spack.variant import Variant
-from spack.spec import Spec, parse_anonymous_spec
from spack.resource import Resource
-from spack.fetch_strategy import from_kwargs
-
-__all__ = ['depends_on', 'extends', 'provides', 'patch', 'version', 'variant',
- 'resource']
-
-#
-# This is a list of all directives, built up as they are defined in
-# this file.
-#
-directives = {}
-
-
-def ensure_dicts(pkg):
- """Ensure that a package has all the dicts required by directives."""
- for name, d in directives.items():
- d.ensure_dicts(pkg)
-
-
-class directive(object):
- """Decorator for Spack directives.
-
- Spack directives allow you to modify a package while it is being
- defined, e.g. to add version or dependency information. Directives
- are one of the key pieces of Spack's package "language", which is
- embedded in python.
-
- Here's an example directive:
-
- @directive(dicts='versions')
- version(pkg, ...):
- ...
-
- This directive allows you write:
-
- class Foo(Package):
- version(...)
-
- The ``@directive`` decorator handles a couple things for you:
-
- 1. Adds the class scope (pkg) as an initial parameter when
- called, like a class method would. This allows you to modify
- a package from within a directive, while the package is still
- being defined.
-
- 2. It automatically adds a dictionary called "versions" to the
- package so that you can refer to pkg.versions.
+from spack.spec import Spec, parse_anonymous_spec
+from spack.variant import Variant
+from spack.version import Version
- The ``(dicts='versions')`` part ensures that ALL packages in Spack
- will have a ``versions`` attribute after they're constructed, and
- that if no directive actually modified it, it will just be an
- empty dict.
+__all__ = []
- This is just a modular way to add storage attributes to the
- Package class, and it's how Spack gets information from the
- packages to the core.
+class DirectiveMetaMixin(type):
+ """Flushes the directives that were temporarily stored in the staging
+ area into the package.
"""
- def __init__(self, dicts=None):
+ # Set of all known directives
+ _directive_names = set()
+ _directives_to_be_executed = []
+
+ def __new__(mcs, name, bases, attr_dict):
+ # Initialize the attribute containing the list of directives
+ # to be executed. Here we go reversed because we want to execute
+ # commands:
+ # 1. in the order they were defined
+ # 2. following the MRO
+ attr_dict['_directives_to_be_executed'] = []
+ for base in reversed(bases):
+ try:
+ directive_from_base = base._directives_to_be_executed
+ attr_dict['_directives_to_be_executed'].extend(
+ directive_from_base
+ )
+ except AttributeError:
+ # The base class didn't have the required attribute.
+ # Continue searching
+ pass
+ # De-duplicates directives from base classes
+ attr_dict['_directives_to_be_executed'] = [
+ x for x in llnl.util.lang.dedupe(
+ attr_dict['_directives_to_be_executed']
+ )
+ ]
+
+ # Move things to be executed from module scope (where they
+ # are collected first) to class scope
+ if DirectiveMetaMixin._directives_to_be_executed:
+ attr_dict['_directives_to_be_executed'].extend(
+ DirectiveMetaMixin._directives_to_be_executed
+ )
+ DirectiveMetaMixin._directives_to_be_executed = []
+
+ return super(DirectiveMetaMixin, mcs).__new__(
+ mcs, name, bases, attr_dict
+ )
+
+ def __init__(cls, name, bases, attr_dict):
+ # The class is being created: if it is a package we must ensure
+ # that the directives are called on the class to set it up
+ module = inspect.getmodule(cls)
+ if 'spack.pkg' in module.__name__:
+ # Package name as taken
+ # from llnl.util.lang.get_calling_module_name
+ pkg_name = module.__name__.split('.')[-1]
+ setattr(cls, 'name', pkg_name)
+ # Ensure the presence of the dictionaries associated
+ # with the directives
+ for d in DirectiveMetaMixin._directive_names:
+ setattr(cls, d, {})
+ # Lazy execution of directives
+ for command in cls._directives_to_be_executed:
+ command(cls)
+
+ super(DirectiveMetaMixin, cls).__init__(name, bases, attr_dict)
+
+ @staticmethod
+ def directive(dicts=None):
+ """Decorator for Spack directives.
+
+ Spack directives allow you to modify a package while it is being
+ defined, e.g. to add version or dependency information. Directives
+ are one of the key pieces of Spack's package "language", which is
+ embedded in python.
+
+ Here's an example directive:
+
+ @directive(dicts='versions')
+ version(pkg, ...):
+ ...
+
+ This directive allows you to write:
+
+ class Foo(Package):
+ version(...)
+
+ The ``@directive`` decorator handles a couple things for you:
+
+ 1. Adds the class scope (pkg) as an initial parameter when
+ called, like a class method would. This allows you to modify
+ a package from within a directive, while the package is still
+ being defined.
+
+ 2. It automatically adds a dictionary called "versions" to the
+ package so that you can refer to pkg.versions.
+
+ The ``(dicts='versions')`` part ensures that ALL packages in Spack
+ will have a ``versions`` attribute after they're constructed, and
+ that if no directive actually modified it, it will just be an
+ empty dict.
+
+ This is just a modular way to add storage attributes to the
+ Package class, and it's how Spack gets information from the
+ packages to the core.
+
+ """
+ global __all__
+
if isinstance(dicts, basestring):
dicts = (dicts, )
- elif type(dicts) not in (list, tuple):
- raise TypeError(
- "dicts arg must be list, tuple, or string. Found %s" %
- type(dicts))
+ if not isinstance(dicts, collections.Sequence):
+ message = "dicts arg must be list, tuple, or string. Found {0}"
+ raise TypeError(message.format(type(dicts)))
+ # Add the dictionary names if not already there
+ DirectiveMetaMixin._directive_names |= set(dicts)
- self.dicts = dicts
+ # This decorator just returns the directive functions
+ def _decorator(decorated_function):
+ __all__.append(decorated_function.__name__)
- def ensure_dicts(self, pkg):
- """Ensure that a package has the dicts required by this directive."""
- for d in self.dicts:
- if not hasattr(pkg, d):
- setattr(pkg, d, {})
+ @functools.wraps(decorated_function)
+ def _wrapper(*args, **kwargs):
+ # A directive returns either something that is callable on a
+ # package or a sequence of them
+ values = decorated_function(*args, **kwargs)
- attr = getattr(pkg, d)
- if not isinstance(attr, dict):
- raise spack.error.SpackError(
- "Package %s has non-dict %s attribute!" % (pkg, d))
+ # ...so if it is not a sequence make it so
+ if not isinstance(values, collections.Sequence):
+ values = (values, )
- def __call__(self, directive_function):
- directives[directive_function.__name__] = self
+ DirectiveMetaMixin._directives_to_be_executed.extend(values)
+ return _wrapper
- @functools.wraps(directive_function)
- def wrapped(*args, **kwargs):
- pkg = DictWrapper(caller_locals())
- self.ensure_dicts(pkg)
+ return _decorator
- pkg.name = get_calling_module_name()
- return directive_function(pkg, *args, **kwargs)
- return wrapped
+directive = DirectiveMetaMixin.directive
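The metaclass above defers directive execution: calls made in a package's class body only stage callables, and ``DirectiveMetaMixin.__init__`` later replays them on the finished class. A simplified, self-contained model of that pattern (not Spack's real classes, written in the Python 2 style of this codebase):

    class Meta(type):
        _staged = []                        # module-scope staging area

        def __init__(cls, name, bases, attr_dict):
            super(Meta, cls).__init__(name, bases, attr_dict)
            commands, Meta._staged = Meta._staged, []
            cls.versions = {}               # dict the directive populates
            for command in commands:        # lazy execution on the class
                command(cls)

    def version(ver):
        def _execute(pkg):
            pkg.versions[ver] = {}
        Meta._staged.append(_execute)       # what the @directive wrapper does

    class Foo(object):
        __metaclass__ = Meta
        version('1.0')                      # staged now, run at class creation

    print Foo.versions                      # {'1.0': {}}
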
@directive('versions')
-def version(pkg, ver, checksum=None, **kwargs):
+def version(ver, checksum=None, **kwargs):
"""Adds a version and metadata describing how to fetch it.
- Metadata is just stored as a dict in the package's versions
- dictionary. Package must turn it into a valid fetch strategy
- later.
+ Metadata is just stored as a dict in the package's versions
+ dictionary. Package must turn it into a valid fetch strategy
+ later.
"""
- # TODO: checksum vs md5 distinction is confusing -- fix this.
- # special case checksum for backward compatibility
- if checksum:
- kwargs['md5'] = checksum
+ def _execute(pkg):
+ # TODO: checksum vs md5 distinction is confusing -- fix this.
+ # special case checksum for backward compatibility
+ if checksum:
+ kwargs['md5'] = checksum
- # Store kwargs for the package to later with a fetch_strategy.
- pkg.versions[Version(ver)] = kwargs
+ # Store kwargs for the package to use later with a fetch_strategy.
+ pkg.versions[Version(ver)] = kwargs
+ return _execute
-def _depends_on(pkg, spec, when=None):
+def _depends_on(pkg, spec, when=None, type=None):
# If when is False do nothing
if when is False:
return
@@ -180,10 +232,29 @@ def _depends_on(pkg, spec, when=None):
when = pkg.name
when_spec = parse_anonymous_spec(when, pkg.name)
+ if type is None:
+ # The default deptype is build and link because the common case is to
+ # build against a library which then turns into a runtime dependency
+ # due to the linker.
+ # XXX(deptype): Add 'run' to this? It's an uncommon dependency type,
+ # but is most backwards-compatible.
+ type = ('build', 'link')
+
+ if isinstance(type, str):
+ type = spack.spec.special_types.get(type, (type,))
+
+ for deptype in type:
+ if deptype not in spack.spec.alldeps:
+ raise UnknownDependencyTypeError('depends_on', pkg.name, deptype)
+
dep_spec = Spec(spec)
if pkg.name == dep_spec.name:
raise CircularReferenceError('depends_on', pkg.name)
+ pkg_deptypes = pkg.dependency_types.setdefault(dep_spec.name, set())
+ for deptype in type:
+ pkg_deptypes.add(deptype)
+
conditions = pkg.dependencies.setdefault(dep_spec.name, {})
if when_spec in conditions:
conditions[when_spec].constrain(dep_spec, deps=False)
@@ -191,14 +262,19 @@ def _depends_on(pkg, spec, when=None):
conditions[when_spec] = dep_spec
-@directive('dependencies')
-def depends_on(pkg, spec, when=None):
- """Creates a dict of deps with specs defining when they apply."""
- _depends_on(pkg, spec, when=when)
+@directive(('dependencies', 'dependency_types'))
+def depends_on(spec, when=None, type=None):
+ """Creates a dict of deps with specs defining when they apply.
+ This directive is to be used inside a Package definition to declare
+ that the package requires other packages to be built first.
+ @see The section "Dependency specs" in the Spack Packaging Guide."""
+ def _execute(pkg):
+ _depends_on(pkg, spec, when=when, type=type)
+ return _execute
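For reference, a hedged example of how a package of this era would use the reworked directive; the package, URL, and checksum are placeholders, and the dependency type defaults to ('build', 'link') as noted above:

    from spack import *

    class Example(Package):
        """Illustrative package only."""
        homepage = "http://example.com"
        url      = "http://example.com/example-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        depends_on('zlib')                           # defaults to ('build', 'link')
        depends_on('cmake', type='build')            # build-time tool only
        depends_on('python', type=('build', 'run'))  # also needed at run time
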
-@directive(('extendees', 'dependencies'))
-def extends(pkg, spec, **kwargs):
+@directive(('extendees', 'dependencies', 'dependency_types'))
+def extends(spec, **kwargs):
"""Same as depends_on, but dependency is symlinked into parent prefix.
This is for Python and other language modules where the module
@@ -212,102 +288,118 @@ def extends(pkg, spec, **kwargs):
mechanism.
"""
- if pkg.extendees:
- raise DirectiveError("Packages can extend at most one other package.")
+ def _execute(pkg):
+ # if pkg.extendees:
+ # directive = 'extends'
+ # msg = 'Packages can extend at most one other package.'
+ # raise DirectiveError(directive, msg)
- when = kwargs.pop('when', pkg.name)
- _depends_on(pkg, spec, when=when)
- pkg.extendees[spec] = (Spec(spec), kwargs)
+ when = kwargs.pop('when', pkg.name)
+ _depends_on(pkg, spec, when=when)
+ pkg.extendees[spec] = (Spec(spec), kwargs)
+ return _execute
@directive('provided')
-def provides(pkg, *specs, **kwargs):
+def provides(*specs, **kwargs):
"""Allows packages to provide a virtual dependency. If a package provides
'mpi', other packages can declare that they depend on "mpi", and spack
can use the providing package to satisfy the dependency.
"""
- spec_string = kwargs.get('when', pkg.name)
- provider_spec = parse_anonymous_spec(spec_string, pkg.name)
+ def _execute(pkg):
+ spec_string = kwargs.get('when', pkg.name)
+ provider_spec = parse_anonymous_spec(spec_string, pkg.name)
- for string in specs:
- for provided_spec in spack.spec.parse(string):
- if pkg.name == provided_spec.name:
- raise CircularReferenceError('depends_on', pkg.name)
- pkg.provided[provided_spec] = provider_spec
+ for string in specs:
+ for provided_spec in spack.spec.parse(string):
+ if pkg.name == provided_spec.name:
+ raise CircularReferenceError('depends_on', pkg.name)
+ if provided_spec not in pkg.provided:
+ pkg.provided[provided_spec] = set()
+ pkg.provided[provided_spec].add(provider_spec)
+ return _execute
@directive('patches')
-def patch(pkg, url_or_filename, level=1, when=None):
+def patch(url_or_filename, level=1, when=None, **kwargs):
"""Packages can declare patches to apply to source. You can
- optionally provide a when spec to indicate that a particular
- patch should only be applied when the package's spec meets
- certain conditions (e.g. a particular version).
+ optionally provide a when spec to indicate that a particular
+ patch should only be applied when the package's spec meets
+ certain conditions (e.g. a particular version).
"""
- if when is None:
- when = pkg.name
- when_spec = parse_anonymous_spec(when, pkg.name)
- cur_patches = pkg.patches.setdefault(when_spec, [])
- # if this spec is identical to some other, then append this
- # patch to the existing list.
- cur_patches.append(Patch(pkg, url_or_filename, level))
+ def _execute(pkg):
+ constraint = pkg.name if when is None else when
+ when_spec = parse_anonymous_spec(constraint, pkg.name)
+ cur_patches = pkg.patches.setdefault(when_spec, [])
+ # if this spec is identical to some other, then append this
+ # patch to the existing list.
+ cur_patches.append(Patch.create(pkg, url_or_filename, level, **kwargs))
+ return _execute
@directive('variants')
-def variant(pkg, name, default=False, description=""):
+def variant(name, default=False, description=""):
"""Define a variant for the package. Packager can specify a default
value (on or off) as well as a text description."""
-
- default = bool(default)
description = str(description).strip()
- if not re.match(spack.spec.identifier_re, name):
- raise DirectiveError("Invalid variant name in %s: '%s'" %
- (pkg.name, name))
+ def _execute(pkg):
+ if not re.match(spack.spec.identifier_re, name):
+ directive = 'variant'
+ msg = "Invalid variant name in {0}: '{1}'"
+ raise DirectiveError(directive, msg.format(pkg.name, name))
- pkg.variants[name] = Variant(default, description)
+ pkg.variants[name] = Variant(default, description)
+ return _execute
@directive('resources')
-def resource(pkg, **kwargs):
- """
- Define an external resource to be fetched and staged when building the
+def resource(**kwargs):
+ """Define an external resource to be fetched and staged when building the
package. Based on the keywords present in the dictionary the appropriate
FetchStrategy will be used for the resource. Resources are fetched and
- staged in their own folder inside spack stage area, and then linked into
+ staged in their own folder inside spack stage area, and then moved into
the stage area of the package that needs them.
List of recognized keywords:
* 'when' : (optional) represents the condition upon which the resource is
- needed
- * 'destination' : (optional) path where to link the resource. This path
- must be relative to the main package stage area.
+ needed
+ * 'destination' : (optional) path where to move the resource. This path
+ must be relative to the main package stage area.
* 'placement' : (optional) gives the possibility to fine tune how the
- resource is linked into the main package stage area.
+ resource is moved into the main package stage area.
"""
- when = kwargs.get('when', pkg.name)
- destination = kwargs.get('destination', "")
- placement = kwargs.get('placement', None)
- # Check if the path is relative
- if os.path.isabs(destination):
- message = "The destination keyword of a resource directive can't be"
- " an absolute path.\n"
- message += "\tdestination : '{dest}\n'".format(dest=destination)
- raise RuntimeError(message)
- # Check if the path falls within the main package stage area
- test_path = 'stage_folder_root'
- normalized_destination = os.path.normpath(join_path(test_path, destination)
- ) # Normalized absolute path
- if test_path not in normalized_destination:
- message = "The destination folder of a resource must fall within the"
- " main package stage directory.\n"
- message += "\tdestination : '{dest}'\n".format(dest=destination)
- raise RuntimeError(message)
- when_spec = parse_anonymous_spec(when, pkg.name)
- resources = pkg.resources.setdefault(when_spec, [])
- name = kwargs.get('name')
- fetcher = from_kwargs(**kwargs)
- resources.append(Resource(name, fetcher, destination, placement))
+ def _execute(pkg):
+ when = kwargs.get('when', pkg.name)
+ destination = kwargs.get('destination', "")
+ placement = kwargs.get('placement', None)
+
+ # Check if the path is relative
+ if os.path.isabs(destination):
+ message = ("The destination keyword of a resource directive "
+ "can't be an absolute path.\n")
+ message += "\tdestination : '{dest}\n'".format(dest=destination)
+ raise RuntimeError(message)
+
+ # Check if the path falls within the main package stage area
+ test_path = 'stage_folder_root'
+ normalized_destination = os.path.normpath(
+ join_path(test_path, destination)
+ ) # Normalized absolute path
+
+ if test_path not in normalized_destination:
+ message = "The destination folder of a resource must fall "
+ "within the main package stage directory.\n"
+ message += "\tdestination : '{dest}'\n".format(dest=destination)
+ raise RuntimeError(message)
+
+ when_spec = parse_anonymous_spec(when, pkg.name)
+ resources = pkg.resources.setdefault(when_spec, [])
+ name = kwargs.get('name')
+ fetcher = from_kwargs(**kwargs)
+ resources.append(Resource(name, fetcher, destination, placement))
+ return _execute
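A hedged example of the directive, as it might appear inside a package class body; the URL, checksum, and names are placeholders:

    resource(name='extra-data',
             url='http://example.com/extra-1.0.tar.gz',
             md5='0123456789abcdef0123456789abcdef',
             destination='third-party',   # must stay relative to the stage
             placement='extra-data-src',  # directory name inside destination
             when='+extras')
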
class DirectiveError(spack.error.SpackError):
@@ -326,3 +418,14 @@ class CircularReferenceError(DirectiveError):
directive,
"Package '%s' cannot pass itself to %s" % (package, directive))
self.package = package
+
+
+class UnknownDependencyTypeError(DirectiveError):
+ """This is raised when a dependency is of an unknown type."""
+
+ def __init__(self, directive, package, deptype):
+ super(UnknownDependencyTypeError, self).__init__(
+ directive,
+ "Package '%s' cannot depend on a package via %s."
+ % (package, deptype))
+ self.package = package
diff --git a/lib/spack/spack/directory_layout.py b/lib/spack/spack/directory_layout.py
index 32d27d7bd0..28e6584fb2 100644
--- a/lib/spack/spack/directory_layout.py
+++ b/lib/spack/spack/directory_layout.py
@@ -22,19 +22,17 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import re
import os
import exceptions
-import hashlib
import shutil
import glob
import tempfile
import yaml
-import llnl.util.tty as tty
from llnl.util.filesystem import join_path, mkdirp
-from spack.spec import Spec
+import spack
+import spack.spec
from spack.error import SpackError
@@ -50,10 +48,10 @@ class DirectoryLayout(object):
install, and they can use this to customize the nesting structure of
spack installs.
"""
+
def __init__(self, root):
self.root = root
-
@property
def hidden_file_paths(self):
"""Return a list of hidden files used by the directory layout.
@@ -66,25 +64,21 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def all_specs(self):
"""To be implemented by subclasses to traverse all specs for which there is
a directory within the root.
"""
raise NotImplementedError()
-
def relative_path_for_spec(self, spec):
"""Implemented by subclasses to return a relative path from the install
root to a unique location for the provided spec."""
raise NotImplementedError()
-
def create_install_directory(self, spec):
"""Creates the installation directory for a spec."""
raise NotImplementedError()
-
def check_installed(self, spec):
"""Checks whether a spec is installed.
@@ -94,7 +88,6 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def extension_map(self, spec):
"""Get a dict of currently installed extension packages for a spec.
@@ -103,7 +96,6 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def check_extension_conflict(self, spec, ext_spec):
"""Ensure that ext_spec can be activated in spec.
@@ -112,7 +104,6 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def check_activated(self, spec, ext_spec):
"""Ensure that ext_spec can be removed from spec.
@@ -120,26 +111,22 @@ class DirectoryLayout(object):
"""
raise NotImplementedError()
-
def add_extension(self, spec, ext_spec):
"""Add to the list of currently installed extensions."""
raise NotImplementedError()
-
def remove_extension(self, spec, ext_spec):
"""Remove from the list of currently installed extensions."""
raise NotImplementedError()
-
def path_for_spec(self, spec):
- """Return an absolute path from the root to a directory for the spec."""
+ """Return absolute path from the root to a directory for the spec."""
_check_concrete(spec)
path = self.relative_path_for_spec(spec)
assert(not path.startswith(self.root))
return os.path.join(self.root, path)
-
def remove_install_directory(self, spec):
"""Removes a prefix and any empty parent directories from the root.
Raised RemoveFailedError if something goes wrong.
@@ -165,7 +152,7 @@ class DirectoryLayout(object):
class YamlDirectoryLayout(DirectoryLayout):
"""Lays out installation directories like this::
<install root>/
- <architecture>/
+ <platform-os-target>/
<compiler>-<compiler version>/
<name>-<version>-<variants>-<hash>
@@ -176,6 +163,7 @@ class YamlDirectoryLayout(DirectoryLayout):
only enabled variants are included in the install path.
Disabled variants are omitted.
"""
+
def __init__(self, root, **kwargs):
super(YamlDirectoryLayout, self).__init__(root)
self.metadata_dir = kwargs.get('metadata_dir', '.spack')
@@ -190,12 +178,10 @@ class YamlDirectoryLayout(DirectoryLayout):
# Cache of already written/read extension maps.
self._extension_maps = {}
-
@property
def hidden_file_paths(self):
return (self.metadata_dir,)
-
def relative_path_for_spec(self, spec):
_check_concrete(spec)
@@ -214,49 +200,47 @@ class YamlDirectoryLayout(DirectoryLayout):
return path
-
def write_spec(self, spec, path):
"""Write a spec out to a file."""
_check_concrete(spec)
with open(path, 'w') as f:
spec.to_yaml(f)
-
def read_spec(self, path):
"""Read the contents of a file and parse them as a spec"""
- with open(path) as f:
- spec = Spec.from_yaml(f)
+ try:
+ with open(path) as f:
+ spec = spack.spec.Spec.from_yaml(f)
+ except Exception as e:
+ if spack.debug:
+ raise
+ raise SpecReadError(
+ 'Unable to read file: %s' % path, 'Cause: ' + str(e))
# Specs read from actual installations are always concrete
spec._mark_concrete()
return spec
-
def spec_file_path(self, spec):
"""Gets full path to spec file"""
_check_concrete(spec)
return join_path(self.metadata_path(spec), self.spec_file_name)
-
def metadata_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir)
-
def build_log_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.build_log_name)
-
def build_env_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.build_env_name)
-
def build_packages_path(self, spec):
return join_path(self.path_for_spec(spec), self.metadata_dir,
self.packages_dir)
-
def create_install_directory(self, spec):
_check_concrete(spec)
@@ -267,7 +251,6 @@ class YamlDirectoryLayout(DirectoryLayout):
mkdirp(self.metadata_path(spec))
self.write_spec(spec, self.spec_file_path(spec))
-
def check_installed(self, spec):
_check_concrete(spec)
path = self.path_for_spec(spec)
@@ -278,20 +261,26 @@ class YamlDirectoryLayout(DirectoryLayout):
if not os.path.isfile(spec_file_path):
raise InconsistentInstallDirectoryError(
- 'Inconsistent state: install prefix exists but contains no spec.yaml:',
+ 'Install prefix exists but contains no spec.yaml:',
" " + path)
installed_spec = self.read_spec(spec_file_path)
if installed_spec == spec:
return path
+ # DAG hashes currently do not include build dependencies.
+ #
+ # TODO: remove this when we do better concretization and don't
+ # ignore build-only deps in hashes.
+ elif installed_spec == spec.copy(deps=('link', 'run')):
+ return path
+
if spec.dag_hash() == installed_spec.dag_hash():
- raise SpecHashCollisionError(installed_hash, spec_hash)
+ raise SpecHashCollisionError(spec, installed_spec)
else:
raise InconsistentInstallDirectoryError(
'Spec file in %s does not match hash!' % spec_file_path)
-
def all_specs(self):
if not os.path.isdir(self.root):
return []
@@ -301,20 +290,17 @@ class YamlDirectoryLayout(DirectoryLayout):
spec_files = glob.glob(pattern)
return [self.read_spec(s) for s in spec_files]
-
def specs_by_hash(self):
by_hash = {}
for spec in self.all_specs():
by_hash[spec.dag_hash()] = spec
return by_hash
-
def extension_file_path(self, spec):
"""Gets full path to an installed package's extension file"""
_check_concrete(spec)
return join_path(self.metadata_path(spec), self.extension_file_name)
-
def _write_extensions(self, spec, extensions):
path = self.extension_file_path(spec)
@@ -326,23 +312,22 @@ class YamlDirectoryLayout(DirectoryLayout):
# write tmp file
with tmp:
yaml.dump({
- 'extensions' : [
- { ext.name : {
- 'hash' : ext.dag_hash(),
- 'path' : str(ext.prefix)
+ 'extensions': [
+ {ext.name: {
+ 'hash': ext.dag_hash(),
+ 'path': str(ext.prefix)
}} for ext in sorted(extensions.values())]
}, tmp, default_flow_style=False)
# Atomic update by moving tmpfile on top of old one.
os.rename(tmp.name, path)
-
def _extension_map(self, spec):
"""Get a dict<name -> spec> for all extensions currently
installed for this package."""
_check_concrete(spec)
- if not spec in self._extension_maps:
+ if spec not in self._extension_maps:
path = self.extension_file_path(spec)
if not os.path.exists(path):
self._extension_maps[spec] = {}
@@ -357,14 +342,14 @@ class YamlDirectoryLayout(DirectoryLayout):
dag_hash = entry[name]['hash']
prefix = entry[name]['path']
- if not dag_hash in by_hash:
+ if dag_hash not in by_hash:
raise InvalidExtensionSpecError(
"Spec %s not found in %s" % (dag_hash, prefix))
ext_spec = by_hash[dag_hash]
- if not prefix == ext_spec.prefix:
+ if prefix != ext_spec.prefix:
raise InvalidExtensionSpecError(
- "Prefix %s does not match spec with hash %s: %s"
+ "Prefix %s does not match spec hash %s: %s"
% (prefix, dag_hash, ext_spec))
exts[ext_spec.name] = ext_spec
@@ -372,13 +357,11 @@ class YamlDirectoryLayout(DirectoryLayout):
return self._extension_maps[spec]
-
def extension_map(self, spec):
"""Defensive copying version of _extension_map() for external API."""
_check_concrete(spec)
return self._extension_map(spec).copy()
-
def check_extension_conflict(self, spec, ext_spec):
exts = self._extension_map(spec)
if ext_spec.name in exts:
@@ -388,13 +371,11 @@ class YamlDirectoryLayout(DirectoryLayout):
else:
raise ExtensionConflictError(spec, ext_spec, installed_spec)
-
def check_activated(self, spec, ext_spec):
exts = self._extension_map(spec)
- if (not ext_spec.name in exts) or (ext_spec != exts[ext_spec.name]):
+ if (ext_spec.name not in exts) or (ext_spec != exts[ext_spec.name]):
raise NoSuchExtensionError(spec, ext_spec)
-
def add_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
@@ -407,7 +388,6 @@ class YamlDirectoryLayout(DirectoryLayout):
exts[ext_spec.name] = ext_spec
self._write_extensions(spec, exts)
-
def remove_extension(self, spec, ext_spec):
_check_concrete(spec)
_check_concrete(ext_spec)
@@ -423,64 +403,76 @@ class YamlDirectoryLayout(DirectoryLayout):
class DirectoryLayoutError(SpackError):
"""Superclass for directory layout errors."""
+
def __init__(self, message, long_msg=None):
super(DirectoryLayoutError, self).__init__(message, long_msg)
class SpecHashCollisionError(DirectoryLayoutError):
"""Raised when there is a hash collision in an install layout."""
+
def __init__(self, installed_spec, new_spec):
super(SpecHashCollisionError, self).__init__(
'Specs %s and %s have the same SHA-1 prefix!'
- % installed_spec, new_spec)
+ % (installed_spec, new_spec))
class RemoveFailedError(DirectoryLayoutError):
"""Raised when a DirectoryLayout cannot remove an install prefix."""
+
def __init__(self, installed_spec, prefix, error):
super(RemoveFailedError, self).__init__(
'Could not remove prefix %s for %s : %s'
- % prefix, installed_spec.short_spec, error)
+ % (prefix, installed_spec.short_spec, error))
self.cause = error
class InconsistentInstallDirectoryError(DirectoryLayoutError):
"""Raised when a package seems to be installed to the wrong place."""
+
def __init__(self, message, long_msg=None):
- super(InconsistentInstallDirectoryError, self).__init__(message, long_msg)
+ super(InconsistentInstallDirectoryError, self).__init__(
+ message, long_msg)
class InstallDirectoryAlreadyExistsError(DirectoryLayoutError):
"""Raised when create_install_directory is called unnecessarily."""
+
def __init__(self, path):
super(InstallDirectoryAlreadyExistsError, self).__init__(
"Install path %s already exists!")
+class SpecReadError(DirectoryLayoutError):
+ """Raised when directory layout can't read a spec."""
+
+
class InvalidExtensionSpecError(DirectoryLayoutError):
"""Raised when an extension file has a bad spec in it."""
- def __init__(self, message):
- super(InvalidExtensionSpecError, self).__init__(message)
class ExtensionAlreadyInstalledError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
+
def __init__(self, spec, ext_spec):
super(ExtensionAlreadyInstalledError, self).__init__(
- "%s is already installed in %s" % (ext_spec.short_spec, spec.short_spec))
+ "%s is already installed in %s"
+ % (ext_spec.short_spec, spec.short_spec))
class ExtensionConflictError(DirectoryLayoutError):
"""Raised when an extension is added to a package that already has it."""
+
def __init__(self, spec, ext_spec, conflict):
super(ExtensionConflictError, self).__init__(
- "%s cannot be installed in %s because it conflicts with %s"% (
- ext_spec.short_spec, spec.short_spec, conflict.short_spec))
+ "%s cannot be installed in %s because it conflicts with %s"
+ % (ext_spec.short_spec, spec.short_spec, conflict.short_spec))
class NoSuchExtensionError(DirectoryLayoutError):
"""Raised when an extension isn't there on deactivate."""
+
def __init__(self, spec, ext_spec):
super(NoSuchExtensionError, self).__init__(
- "%s cannot be removed from %s because it's not activated."% (
- ext_spec.short_spec, spec.short_spec))
+ "%s cannot be removed from %s because it's not activated."
+ % (ext_spec.short_spec, spec.short_spec))
diff --git a/lib/spack/spack/environment.py b/lib/spack/spack/environment.py
index af642dcc9b..de30a9c7be 100644
--- a/lib/spack/spack/environment.py
+++ b/lib/spack/spack/environment.py
@@ -24,44 +24,60 @@
##############################################################################
import collections
import inspect
+import json
import os
import os.path
+import subprocess
class NameModifier(object):
+
def __init__(self, name, **kwargs):
self.name = name
self.args = {'name': name}
self.args.update(kwargs)
+ def update_args(self, **kwargs):
+ self.__dict__.update(kwargs)
+ self.args.update(kwargs)
+
class NameValueModifier(object):
+
def __init__(self, name, value, **kwargs):
self.name = name
self.value = value
self.separator = kwargs.get('separator', ':')
- self.args = {'name': name, 'value': value, 'delim': self.separator}
+ self.args = {'name': name, 'value': value, 'separator': self.separator}
+ self.args.update(kwargs)
+
+ def update_args(self, **kwargs):
+ self.__dict__.update(kwargs)
self.args.update(kwargs)
class SetEnv(NameValueModifier):
+
def execute(self):
os.environ[self.name] = str(self.value)
class UnsetEnv(NameModifier):
+
def execute(self):
# Avoid throwing if the variable was not set
os.environ.pop(self.name, None)
class SetPath(NameValueModifier):
+
def execute(self):
string_path = concatenate_paths(self.value, separator=self.separator)
os.environ[self.name] = string_path
class AppendPath(NameValueModifier):
+
def execute(self):
environment_value = os.environ.get(self.name, '')
directories = environment_value.split(
@@ -71,6 +87,7 @@ class AppendPath(NameValueModifier):
class PrependPath(NameValueModifier):
+
def execute(self):
environment_value = os.environ.get(self.name, '')
directories = environment_value.split(
@@ -80,6 +97,7 @@ class PrependPath(NameValueModifier):
class RemovePath(NameValueModifier):
+
def execute(self):
environment_value = os.environ.get(self.name, '')
directories = environment_value.split(
@@ -90,6 +108,7 @@ class RemovePath(NameValueModifier):
class EnvironmentModifications(object):
+
"""
Keeps track of requests to modify the current environment.
@@ -240,6 +259,132 @@ class EnvironmentModifications(object):
for x in actions:
x.execute()
+ @staticmethod
+ def from_sourcing_files(*args, **kwargs):
+ """Creates an instance of EnvironmentModifications that, if executed,
+ has the same effect on the environment as sourcing the files passed as
+ parameters
+
+ :param \*args: list of files to be sourced
+ :rtype: instance of EnvironmentModifications
+ """
+
+ env = EnvironmentModifications()
+ # Check if the files are actually there
+ files = [line.split(' ')[0] for line in args]
+ non_existing = [file for file in files if not os.path.isfile(file)]
+ if non_existing:
+ message = 'trying to source non-existing files\n'
+ message += '\n'.join(non_existing)
+ raise RuntimeError(message)
+ # Relevant kwd parameters and formats
+ info = dict(kwargs)
+ info.setdefault('shell', '/bin/bash')
+ info.setdefault('shell_options', '-c')
+ info.setdefault('source_command', 'source')
+ info.setdefault('suppress_output', '&> /dev/null')
+ info.setdefault('concatenate_on_success', '&&')
+
+ shell = '{shell}'.format(**info)
+ shell_options = '{shell_options}'.format(**info)
+ source_file = '{source_command} {file} {concatenate_on_success}'
+
+ dump_cmd = "import os, json; print json.dumps(dict(os.environ))"
+ dump_environment = 'python -c "%s"' % dump_cmd
+
+ # Construct the command that will be executed
+ command = [source_file.format(file=file, **info) for file in args]
+ command.append(dump_environment)
+ command = ' '.join(command)
+ command = [
+ shell,
+ shell_options,
+ command
+ ]
+
+ # Try to source all the files,
+ proc = subprocess.Popen(
+ command, stdout=subprocess.PIPE, env=os.environ)
+ proc.wait()
+ if proc.returncode != 0:
+ raise RuntimeError('sourcing files returned a non-zero exit code')
+ output = ''.join([line for line in proc.stdout])
+ # Construct a dictionary with all the variables in the new environment
+ after_source_env = dict(json.loads(output))
+ this_environment = dict(os.environ)
+
+ # Filter variables that are not related to sourcing a file
+ to_be_filtered = 'SHLVL', '_', 'PWD', 'OLDPWD'
+ for d in after_source_env, this_environment:
+ for name in to_be_filtered:
+ d.pop(name, None)
+
+ # Fill the EnvironmentModifications instance
+
+ # New variables
+ new_variables = set(after_source_env) - set(this_environment)
+ for x in new_variables:
+ env.set(x, after_source_env[x])
+ # Variables that have been unset
+ unset_variables = set(this_environment) - set(after_source_env)
+ for x in unset_variables:
+ env.unset(x)
+ # Variables that have been modified
+ common_variables = set(
+ this_environment).intersection(set(after_source_env))
+ modified_variables = [x for x in common_variables
+ if this_environment[x] != after_source_env[x]]
+
+ def return_separator_if_any(first_value, second_value):
+ separators = ':', ';'
+ for separator in separators:
+ if separator in first_value and separator in second_value:
+ return separator
+ return None
+
+ for x in modified_variables:
+ current = this_environment[x]
+ modified = after_source_env[x]
+ sep = return_separator_if_any(current, modified)
+ if sep is None:
+ # We just need to set the variable to the new value
+ env.set(x, after_source_env[x])
+ else:
+ current_list = current.split(sep)
+ modified_list = modified.split(sep)
+ # Paths that have been removed
+ remove_list = [
+ ii for ii in current_list if ii not in modified_list]
+ # Check that nothing has been added in the middle of the
+ # current list
+ remaining_list = [
+ ii for ii in current_list if ii in modified_list]
+ start = modified_list.index(remaining_list[0])
+ end = modified_list.index(remaining_list[-1])
+ search = sep.join(modified_list[start:end + 1])
+ if search not in current:
+ # We just need to set the variable to the new value
+ env.set(x, after_source_env[x])
+ break
+ else:
+ try:
+ prepend_list = modified_list[:start]
+ except KeyError:
+ prepend_list = []
+ try:
+ append_list = modified_list[end + 1:]
+ except KeyError:
+ append_list = []
+
+ for item in remove_list:
+ env.remove_path(x, item)
+ for item in append_list:
+ env.append_path(x, item)
+ for item in prepend_list:
+ env.prepend_path(x, item)
+
+ return env
+
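A hedged usage sketch of the helper above: the path is a placeholder, the call shells out to ``/bin/bash`` and a ``python`` on ``PATH`` to dump the post-source environment, and ``apply_modifications()`` is assumed from the rest of this class (its tail, ``for x in actions: x.execute()``, appears in the context above):

    env = EnvironmentModifications.from_sourcing_files('/opt/foo/setup.sh')
    # Each detected change becomes a SetEnv/UnsetEnv/AppendPath/... modifier.
    env.apply_modifications()   # replay the diff onto os.environ
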
def concatenate_paths(paths, separator=':'):
"""
@@ -266,7 +411,7 @@ def set_or_unset_not_first(variable, changes, errstream):
if indexes:
good = '\t \t{context} at {filename}:{lineno}'
nogood = '\t--->\t{context} at {filename}:{lineno}'
- message = 'Suspicious requests to set or unset the variable \'{var}\' found' # NOQA: ignore=E501
+ message = "Suspicious requests to set or unset '{var}' found"
errstream(message.format(var=variable))
for ii, item in enumerate(changes):
print_format = nogood if ii in indexes else good
diff --git a/lib/spack/spack/error.py b/lib/spack/spack/error.py
index 85ad2fe249..b6261a05f4 100644
--- a/lib/spack/spack/error.py
+++ b/lib/spack/spack/error.py
@@ -26,32 +26,44 @@ import os
import sys
import llnl.util.tty as tty
import spack
+import inspect
+
class SpackError(Exception):
"""This is the superclass for all Spack errors.
Subclasses can be found in the modules they have to do with.
"""
+
def __init__(self, message, long_message=None):
super(SpackError, self).__init__()
self.message = message
self._long_message = long_message
+ # for exceptions raised from child build processes, we save the
+ # traceback as a string and print it in the parent.
+ self.traceback = None
@property
def long_message(self):
return self._long_message
-
def die(self):
+ # basic debug message
+ tty.error(self.message)
+ if self.long_message:
+ print(self.long_message)
+
+ # stack trace, etc. in debug mode.
if spack.debug:
- sys.excepthook(*sys.exc_info())
- os._exit(1)
- else:
- tty.error(self.message)
- if self.long_message:
- print self.long_message
- os._exit(1)
+ if self.traceback:
+ # exception came from a build child, already got
+ # traceback in child, so print it.
+ sys.stderr.write(self.traceback)
+ else:
+ # run parent exception hook.
+ sys.excepthook(*sys.exc_info())
+ os._exit(1)
def __str__(self):
msg = self.message
@@ -59,14 +71,27 @@ class SpackError(Exception):
msg += "\n %s" % self._long_message
return msg
+ def __repr__(self):
+ args = [repr(self.message), repr(self.long_message)]
+ args = ','.join(args)
+ qualified_name = inspect.getmodule(
+ self).__name__ + '.' + type(self).__name__
+ return qualified_name + '(' + args + ')'
+
+ def __reduce__(self):
+ return type(self), (self.message, self.long_message)
+
+
class UnsupportedPlatformError(SpackError):
"""Raised by packages when a platform is not supported"""
+
def __init__(self, message):
super(UnsupportedPlatformError, self).__init__(message)
class NoNetworkConnectionError(SpackError):
"""Raised when an operation needs an internet connection."""
+
def __init__(self, message, url):
super(NoNetworkConnectionError, self).__init__(
"No network connection: " + str(message),
diff --git a/lib/spack/spack/fetch_strategy.py b/lib/spack/spack/fetch_strategy.py
index 7c8cebe0c9..23f3b9a41e 100644
--- a/lib/spack/spack/fetch_strategy.py
+++ b/lib/spack/spack/fetch_strategy.py
@@ -1,4 +1,4 @@
-##############################################################################
+#
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@@ -21,7 +21,7 @@
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
+#
"""
Fetch strategies are used to download source code into a staging area
in order to build it. They need to define the following methods:
@@ -75,11 +75,13 @@ def _needs_stage(fun):
class FetchStrategy(object):
+
"""Superclass of all fetch strategies."""
enabled = False # Non-abstract subclasses should be enabled.
required_attributes = None # Attributes required in version() args.
class __metaclass__(type):
+
"""This metaclass registers all fetch strategies in a list."""
def __init__(cls, name, bases, dict):
@@ -114,6 +116,14 @@ class FetchStrategy(object):
def archive(self, destination):
pass # Used to create tarball for mirror.
+ @property
+ def cachable(self):
+ """Return whether the fetcher is capable of caching the
+ resource it retrieves. This generally is determined by
+ whether the resource is identifiably associated with a
+ specific package version."""
+ pass
+
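Taken together with the overrides added later in this diff (digest for URL fetches, commit/tag for git, revision for svn and hg), the property encodes one rule: a fetch is cachable only when it is pinned to something immutable. A hypothetical helper expressing that rule, for illustration only:

    def _pinned_to_immutable_ref(fetcher):
        # Not a real Spack API; mirrors the subclass overrides below.
        return bool(getattr(fetcher, 'digest', None) or      # URLFetchStrategy
                    getattr(fetcher, 'commit', None) or      # GitFetchStrategy
                    getattr(fetcher, 'tag', None) or
                    getattr(fetcher, 'revision', None))      # Svn/HgFetchStrategy
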
def __str__(self): # Should be human readable URL.
return "FetchStrategy.__str___"
@@ -126,6 +136,7 @@ class FetchStrategy(object):
@pattern.composite(interface=FetchStrategy)
class FetchStrategyComposite(object):
+
"""
Composite for a FetchStrategy object. Implements the GoF composite pattern.
"""
@@ -134,6 +145,7 @@ class FetchStrategyComposite(object):
class URLFetchStrategy(FetchStrategy):
+
"""FetchStrategy that pulls source code from a URL for an archive,
checks the archive against a checksum, and decompresses the archive.
"""
@@ -154,10 +166,20 @@ class URLFetchStrategy(FetchStrategy):
self.digest = digest
self.expand_archive = kwargs.get('expand', True)
+ self.extra_curl_options = kwargs.get('curl_options', [])
+ self._curl = None
+
+ self.extension = kwargs.get('extension', None)
if not self.url:
raise ValueError("URLFetchStrategy requires a url for fetching.")
+ @property
+ def curl(self):
+ if not self._curl:
+ self._curl = which('curl', required=True)
+ return self._curl
+
@_needs_stage
def fetch(self):
self.stage.chdir()
@@ -166,14 +188,13 @@ class URLFetchStrategy(FetchStrategy):
tty.msg("Already downloaded %s" % self.archive_file)
return
- possible_files = self.stage.expected_archive_files
save_file = None
partial_file = None
- if possible_files:
- save_file = self.stage.expected_archive_files[0]
- partial_file = self.stage.expected_archive_files[0] + '.part'
+ if self.stage.save_filename:
+ save_file = self.stage.save_filename
+ partial_file = self.stage.save_filename + '.part'
- tty.msg("Trying to fetch from %s" % self.url)
+ tty.msg("Fetching %s" % self.url)
if partial_file:
save_args = ['-C',
@@ -191,15 +212,21 @@ class URLFetchStrategy(FetchStrategy):
self.url,
]
+ if spack.insecure:
+ curl_args.append('-k')
+
if sys.stdout.isatty():
curl_args.append('-#') # status bar when using a tty
else:
curl_args.append('-sS') # just errors when not.
+ curl_args += self.extra_curl_options
+
# Run curl but grab the mime type from the http headers
- headers = spack.curl(*curl_args, output=str, fail_on_error=False)
+ curl = self.curl
+ headers = curl(*curl_args, output=str, fail_on_error=False)
- if spack.curl.returncode != 0:
+ if curl.returncode != 0:
# clean up archive on failure.
if self.archive_file:
os.remove(self.archive_file)
@@ -207,12 +234,12 @@ class URLFetchStrategy(FetchStrategy):
if partial_file and os.path.exists(partial_file):
os.remove(partial_file)
- if spack.curl.returncode == 22:
+ if curl.returncode == 22:
# This is a 404. Curl will print the error.
raise FailedDownloadError(
self.url, "URL %s was not found!" % self.url)
- elif spack.curl.returncode == 60:
+ elif curl.returncode == 60:
# This is a certificate error. Suggest spack -k
raise FailedDownloadError(
self.url,
@@ -228,19 +255,20 @@ class URLFetchStrategy(FetchStrategy):
# error, but print a spack message too
raise FailedDownloadError(
self.url,
- "Curl failed with error %d" % spack.curl.returncode)
+ "Curl failed with error %d" % curl.returncode)
# Check if we somehow got an HTML file rather than the archive we
# asked for. We only look at the last content type, to handle
# redirects properly.
content_types = re.findall(r'Content-Type:[^\r\n]+', headers)
if content_types and 'text/html' in content_types[-1]:
- tty.warn(
- "The contents of " + self.archive_file + " look like HTML.",
- "The checksum will likely be bad. If it is, you can use",
- "'spack clean <package>' to remove the bad archive, then fix",
- "your internet gateway issue and install again.")
-
+ tty.warn("The contents of ",
+ (self.archive_file if self.archive_file is not None
+ else "the archive"),
+ " look like HTML.",
+ "The checksum will likely be bad. If it is, you can use",
+ "'spack clean <package>' to remove the bad archive, then",
+ "fix your internet gateway issue and install again.")
if save_file:
os.rename(partial_file, save_file)
@@ -252,6 +280,10 @@ class URLFetchStrategy(FetchStrategy):
"""Path to the source archive within this stage directory."""
return self.stage.archive_file
+ @property
+ def cachable(self):
+ return bool(self.digest)
+
@_needs_stage
def expand(self):
if not self.expand_archive:
@@ -263,10 +295,12 @@ class URLFetchStrategy(FetchStrategy):
self.stage.chdir()
if not self.archive_file:
raise NoArchiveFileError(
- "URLFetchStrategy couldn't find archive file",
+ "Couldn't find archive file",
"Failed on expand() for URL %s" % self.url)
- decompress = decompressor_for(self.archive_file)
+ if not self.extension:
+ self.extension = extension(self.archive_file)
+ decompress = decompressor_for(self.archive_file, self.extension)
# Expand all tarballs in their own directory to contain
# exploding tarballs.
@@ -293,7 +327,8 @@ class URLFetchStrategy(FetchStrategy):
shutil.move(os.path.join(tarball_container, f),
os.path.join(self.stage.path, f))
os.rmdir(tarball_container)
-
+ if not files:
+ os.rmdir(tarball_container)
# Set the wd back to the stage when done.
self.stage.chdir()
@@ -302,10 +337,7 @@ class URLFetchStrategy(FetchStrategy):
if not self.archive_file:
raise NoArchiveFileError("Cannot call archive() before fetching.")
- if not extension(destination) == extension(self.archive_file):
- raise ValueError("Cannot archive without matching extensions.")
-
- shutil.move(self.archive_file, destination)
+ shutil.copyfile(self.archive_file, destination)
@_needs_stage
def check(self):
@@ -343,7 +375,7 @@ class URLFetchStrategy(FetchStrategy):
def __repr__(self):
url = self.url if self.url else "no url"
- return "URLFetchStrategy<%s>" % url
+ return "%s<%s>" % (self.__class__.__name__, url)
def __str__(self):
if self.url:
@@ -352,7 +384,45 @@ class URLFetchStrategy(FetchStrategy):
return "[no url]"
+class CacheURLFetchStrategy(URLFetchStrategy):
+ """The resource associated with a cache URL may be out of date."""
+
+ def __init__(self, *args, **kwargs):
+ super(CacheURLFetchStrategy, self).__init__(*args, **kwargs)
+
+ @_needs_stage
+ def fetch(self):
+ path = re.sub('^file://', '', self.url)
+
+ # check whether the cache file exists.
+ if not os.path.isfile(path):
+ raise NoCacheError('No cache of %s' % path)
+
+ self.stage.chdir()
+
+ # remove old symlink if one is there.
+ filename = self.stage.save_filename
+ if os.path.exists(filename):
+ os.remove(filename)
+
+ # Symlink to local cached archive.
+ os.symlink(path, filename)
+
+ # Remove link if checksum fails, or subsequent fetchers
+ # will assume they don't need to download.
+ if self.digest:
+ try:
+ self.check()
+ except ChecksumError:
+ os.remove(self.archive_file)
+ raise
+
+ # Notify the user how we fetched.
+ tty.msg('Using cached archive: %s' % path)
+
+
class VCSFetchStrategy(FetchStrategy):
+
def __init__(self, name, *rev_types, **kwargs):
super(VCSFetchStrategy, self).__init__()
self.name = name
@@ -407,6 +477,7 @@ class VCSFetchStrategy(FetchStrategy):
class GoFetchStrategy(VCSFetchStrategy):
+
"""
Fetch strategy that employs the `go get` infrastructure
Use like this in a package:
@@ -466,6 +537,7 @@ class GoFetchStrategy(VCSFetchStrategy):
class GitFetchStrategy(VCSFetchStrategy):
+
"""
Fetch strategy that gets source code from a git repository.
Use like this in a package:
@@ -494,6 +566,7 @@ class GitFetchStrategy(VCSFetchStrategy):
super(GitFetchStrategy, self).__init__(
'git', 'tag', 'branch', 'commit', **forwarded_args)
self._git = None
+ self.submodules = kwargs.get('submodules', False)
@property
def git_version(self):
@@ -504,8 +577,18 @@ class GitFetchStrategy(VCSFetchStrategy):
def git(self):
if not self._git:
self._git = which('git', required=True)
+
+ # If the user asked for insecure fetching, make that work
+ # with git as well.
+ if spack.insecure:
+ self._git.add_default_env('GIT_SSL_NO_VERIFY', 'true')
+
return self._git
+ @property
+ def cachable(self):
+ return bool(self.commit or self.tag)
+
@_needs_stage
def fetch(self):
self.stage.chdir()
@@ -572,6 +655,10 @@ class GitFetchStrategy(VCSFetchStrategy):
self.git('pull', '--tags', ignore_errors=1)
self.git('checkout', self.tag)
+ # Init submodules if the user asked for them.
+ if self.submodules:
+ self.git('submodule', 'update', '--init')
+
def archive(self, destination):
super(GitFetchStrategy, self).archive(destination, exclude='.git')
@@ -586,6 +673,7 @@ class GitFetchStrategy(VCSFetchStrategy):
class SvnFetchStrategy(VCSFetchStrategy):
+
"""Fetch strategy that gets source code from a subversion repository.
Use like this in a package:
@@ -617,6 +705,10 @@ class SvnFetchStrategy(VCSFetchStrategy):
self._svn = which('svn', required=True)
return self._svn
+ @property
+ def cachable(self):
+ return bool(self.revision)
+
@_needs_stage
def fetch(self):
self.stage.chdir()
@@ -662,6 +754,7 @@ class SvnFetchStrategy(VCSFetchStrategy):
class HgFetchStrategy(VCSFetchStrategy):
+
"""
Fetch strategy that gets source code from a Mercurial repository.
Use like this in a package:
@@ -699,6 +792,10 @@ class HgFetchStrategy(VCSFetchStrategy):
self._hg = which('hg', required=True)
return self._hg
+ @property
+ def cachable(self):
+ return bool(self.revision)
+
@_needs_stage
def fetch(self):
self.stage.chdir()
@@ -805,14 +902,62 @@ def for_package_version(pkg, version):
raise InvalidArgsError(pkg, version)
+def from_list_url(pkg):
+ """If a package provides a URL which lists URLs for resources by
+    version, this can create a fetcher for a URL discovered for
+ the specified package's version."""
+ if pkg.list_url:
+ try:
+ versions = pkg.fetch_remote_versions()
+ try:
+ url_from_list = versions[pkg.version]
+ digest = None
+ if pkg.version in pkg.versions:
+ digest = pkg.versions[pkg.version].get('md5', None)
+ return URLFetchStrategy(url=url_from_list, digest=digest)
+ except KeyError:
+ tty.msg("Can not find version %s in url_list" %
+ self.version)
+ except:
+ tty.msg("Could not determine url from list_url.")
+
+
+class FsCache(object):
+
+ def __init__(self, root):
+ self.root = os.path.abspath(root)
+
+ def store(self, fetcher, relativeDst):
+ # skip fetchers that aren't cachable
+ if not fetcher.cachable:
+ return
+
+ # Don't store things that are already cached.
+ if isinstance(fetcher, CacheURLFetchStrategy):
+ return
+
+ dst = join_path(self.root, relativeDst)
+ mkdirp(os.path.dirname(dst))
+ fetcher.archive(dst)
+
+ def fetcher(self, targetPath, digest, **kwargs):
+ path = join_path(self.root, targetPath)
+ return CacheURLFetchStrategy(path, digest, **kwargs)
+
+ def destroy(self):
+ shutil.rmtree(self.root, ignore_errors=True)
+
+
class FetchError(spack.error.SpackError):
- def __init__(self, msg, long_msg=None):
- super(FetchError, self).__init__(msg, long_msg)
+ """Superclass fo fetcher errors."""
+
+
+class NoCacheError(FetchError):
+ """Raised when there is no cached archive for a package."""
class FailedDownloadError(FetchError):
"""Raised wen a download fails."""
-
def __init__(self, url, msg=""):
super(FailedDownloadError, self).__init__(
"Failed to fetch file from URL: %s" % url, msg)
@@ -820,13 +965,11 @@ class FailedDownloadError(FetchError):
class NoArchiveFileError(FetchError):
- def __init__(self, msg, long_msg):
- super(NoArchiveFileError, self).__init__(msg, long_msg)
+ """"Raised when an archive file is expected but none exists."""
class NoDigestError(FetchError):
- def __init__(self, msg, long_msg=None):
- super(NoDigestError, self).__init__(msg, long_msg)
+ """Raised after attempt to checksum when URL has no digest."""
class InvalidArgsError(FetchError):
@@ -840,13 +983,9 @@ class InvalidArgsError(FetchError):
class ChecksumError(FetchError):
"""Raised when archive fails to checksum."""
- def __init__(self, message, long_msg=None):
- super(ChecksumError, self).__init__(message, long_msg)
-
class NoStageError(FetchError):
"""Raised when fetch operations are called before set_stage()."""
-
def __init__(self, method):
super(NoStageError, self).__init__(
"Must call FetchStrategy.set_stage() before calling %s" %
diff --git a/lib/spack/spack/file_cache.py b/lib/spack/spack/file_cache.py
new file mode 100644
index 0000000000..e37f77d68d
--- /dev/null
+++ b/lib/spack/spack/file_cache.py
@@ -0,0 +1,182 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import shutil
+
+from llnl.util.filesystem import *
+from llnl.util.lock import *
+
+from spack.error import SpackError
+
+
+class FileCache(object):
+ """This class manages cached data in the filesystem.
+
+ - Cache files are fetched and stored by unique keys. Keys can be relative
+      paths, so that there can be some hierarchy in the cache.
+
+ - The FileCache handles locking cache files for reading and writing, so
+ client code need not manage locks for cache entries.
+
+ """
+
+ def __init__(self, root):
+ """Create a file cache object.
+
+ This will create the cache directory if it does not exist yet.
+
+ """
+ self.root = root.rstrip(os.path.sep)
+ if not os.path.exists(self.root):
+ mkdirp(self.root)
+
+ self._locks = {}
+
+ def destroy(self):
+ """Remove all files under the cache root."""
+ for f in os.listdir(self.root):
+ path = join_path(self.root, f)
+ if os.path.isdir(path):
+ shutil.rmtree(path, True)
+ else:
+ os.remove(path)
+
+ def cache_path(self, key):
+ """Path to the file in the cache for a particular key."""
+ return join_path(self.root, key)
+
+ def _lock_path(self, key):
+ """Path to the file in the cache for a particular key."""
+ keyfile = os.path.basename(key)
+ keydir = os.path.dirname(key)
+
+ return join_path(self.root, keydir, '.' + keyfile + '.lock')
+
+ def _get_lock(self, key):
+ """Create a lock for a key, if necessary, and return a lock object."""
+ if key not in self._locks:
+ self._locks[key] = Lock(self._lock_path(key))
+ return self._locks[key]
+
+ def init_entry(self, key):
+ """Ensure we can access a cache file. Create a lock for it if needed.
+
+ Return whether the cache file exists yet or not.
+ """
+ cache_path = self.cache_path(key)
+
+ exists = os.path.exists(cache_path)
+ if exists:
+ if not os.path.isfile(cache_path):
+ raise CacheError("Cache file is not a file: %s" % cache_path)
+
+ if not os.access(cache_path, os.R_OK | os.W_OK):
+ raise CacheError("Cannot access cache file: %s" % cache_path)
+ else:
+ # if the file is hierarchical, make parent directories
+ parent = os.path.dirname(cache_path)
+ if parent.rstrip(os.path.sep) != self.root:
+ mkdirp(parent)
+
+ if not os.access(parent, os.R_OK | os.W_OK):
+ raise CacheError("Cannot access cache directory: %s" % parent)
+
+ # ensure lock is created for this key
+ self._get_lock(key)
+ return exists
+
+ def read_transaction(self, key):
+ """Get a read transaction on a file cache item.
+
+ Returns a ReadTransaction context manager and opens the cache file for
+ reading. You can use it like this:
+
+ with file_cache_object.read_transaction(key) as cache_file:
+ cache_file.read()
+
+ """
+ return ReadTransaction(
+ self._get_lock(key), lambda: open(self.cache_path(key)))
+
+ def write_transaction(self, key):
+ """Get a write transaction on a file cache item.
+
+ Returns a WriteTransaction context manager that opens a temporary file
+ for writing. Once the context manager finishes, if nothing went wrong,
+ moves the file into place on top of the old file atomically.
+
+ """
+ class WriteContextManager(object):
+
+ def __enter__(cm):
+ cm.orig_filename = self.cache_path(key)
+ cm.orig_file = None
+ if os.path.exists(cm.orig_filename):
+ cm.orig_file = open(cm.orig_filename, 'r')
+
+ cm.tmp_filename = self.cache_path(key) + '.tmp'
+ cm.tmp_file = open(cm.tmp_filename, 'w')
+
+ return cm.orig_file, cm.tmp_file
+
+ def __exit__(cm, type, value, traceback):
+ if cm.orig_file:
+ cm.orig_file.close()
+ cm.tmp_file.close()
+
+ if value:
+ # remove tmp on exception & raise it
+                    os.remove(cm.tmp_filename)
+ raise value
+ else:
+ os.rename(cm.tmp_filename, cm.orig_filename)
+
+ return WriteTransaction(self._get_lock(key), WriteContextManager)
+
+ def mtime(self, key):
+ """Return modification time of cache file, or 0 if it does not exist.
+
+ Time is in units returned by os.stat in the mtime field, which is
+ platform-dependent.
+
+ """
+ if not self.init_entry(key):
+ return 0
+ else:
+ sinfo = os.stat(self.cache_path(key))
+ return sinfo.st_mtime
+
+ def remove(self, key):
+ lock = self._get_lock(key)
+ try:
+ lock.acquire_write()
+ os.unlink(self.cache_path(key))
+ finally:
+ lock.release_write()
+ os.unlink(self._lock_path(key))
+
+
+class CacheError(SpackError):
+ pass
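
A short sketch of how client code might use these transactions, assuming Spack is importable; the cache root and key below are placeholders:

    from spack.file_cache import FileCache

    cache = FileCache('/tmp/spack-file-cache')
    key = 'indexes/example-index'      # keys may be relative paths

    # Make parent directories and the per-key lock available.
    cache.init_entry(key)

    # Write under an exclusive lock; the temporary file replaces the old
    # one atomically when the block exits without an exception.
    with cache.write_transaction(key) as (old_file, new_file):
        previous = old_file.read() if old_file else ''
        new_file.write(previous + 'updated\n')

    # Read under a shared lock, as in the read_transaction() docstring.
    with cache.read_transaction(key) as f:
        print(f.read())
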
diff --git a/lib/spack/spack/graph.py b/lib/spack/spack/graph.py
index 22058d41d8..1f0390dae9 100644
--- a/lib/spack/spack/graph.py
+++ b/lib/spack/spack/graph.py
@@ -61,37 +61,39 @@ Note that ``graph_ascii`` assumes a single spec while ``graph_dot``
can take a number of specs as input.
"""
-__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
from heapq import *
from llnl.util.lang import *
from llnl.util.tty.color import *
-import spack
-from spack.spec import Spec
+from spack.spec import *
+
+__all__ = ['topological_sort', 'graph_ascii', 'AsciiGraph', 'graph_dot']
-def topological_sort(spec, **kwargs):
+def topological_sort(spec, reverse=False, deptype=None):
"""Topological sort for specs.
Return a list of dependency specs sorted topologically. The spec
argument is not modified in the process.
"""
- reverse = kwargs.get('reverse', False)
+ deptype = canonical_deptype(deptype)
+
if not reverse:
- parents = lambda s: s.dependents
- children = lambda s: s.dependencies
+ parents = lambda s: s.dependents()
+ children = lambda s: s.dependencies()
else:
- parents = lambda s: s.dependencies
- children = lambda s: s.dependents
+ parents = lambda s: s.dependencies()
+ children = lambda s: s.dependents()
# Work on a copy so this is nondestructive.
- spec = spec.copy()
- nodes = spec.index()
+ spec = spec.copy(deps=deptype)
+ nodes = spec.index(deptype=deptype)
topo_order = []
+ par = dict((name, parents(nodes[name])) for name in nodes.keys())
remaining = [name for name in nodes.keys() if not parents(nodes[name])]
heapify(remaining)
@@ -100,12 +102,12 @@ def topological_sort(spec, **kwargs):
topo_order.append(name)
node = nodes[name]
- for dep in children(node).values():
- del parents(dep)[node.name]
- if not parents(dep):
+ for dep in children(node):
+ par[dep.name].remove(node)
+ if not par[dep.name]:
heappush(remaining, dep.name)
- if any(parents(s) for s in spec.traverse()):
+ if any(par.get(s.name, []) for s in spec.traverse()):
raise ValueError("Spec has cycles!")
else:
return topo_order
@@ -125,18 +127,21 @@ def find(seq, predicate):
return -1
-# Names of different graph line states. We Record previous line
+# Names of different graph line states. We record previous line
# states so that we can easily determine what to do when connecting.
states = ('node', 'collapse', 'merge-right', 'expand-right', 'back-edge')
NODE, COLLAPSE, MERGE_RIGHT, EXPAND_RIGHT, BACK_EDGE = states
+
class AsciiGraph(object):
+
def __init__(self):
# These can be set after initialization or after a call to
# graph() to change behavior.
self.node_character = '*'
self.debug = False
self.indent = 0
+ self.deptype = alldeps
# These are colors in the order they'll be used for edges.
# See llnl.util.tty.color for details on color characters.
@@ -151,18 +156,18 @@ class AsciiGraph(object):
self._prev_state = None # State of previous line
self._prev_index = None # Index of expansion point of prev line
-
def _indent(self):
self._out.write(self.indent * ' ')
-
def _write_edge(self, string, index, sub=0):
"""Write a colored edge to the output stream."""
+ # Ignore empty frontier entries (they're just collapsed)
+ if not self._frontier[index]:
+ return
name = self._frontier[index][sub]
edge = "@%s{%s}" % (self._name_to_color[name], string)
self._out.write(edge)
-
def _connect_deps(self, i, deps, label=None):
"""Connect dependencies to existing edges in the frontier.
@@ -197,7 +202,8 @@ class AsciiGraph(object):
collapse = True
if self._prev_state == EXPAND_RIGHT:
# Special case where previous line expanded and i is off by 1.
- self._back_edge_line([], j, i+1, True, label + "-1.5 " + str((i+1,j)))
+ self._back_edge_line([], j, i + 1, True,
+ label + "-1.5 " + str((i + 1, j)))
collapse = False
else:
@@ -205,19 +211,20 @@ class AsciiGraph(object):
if self._prev_state == NODE and self._prev_index < i:
i += 1
- if i-j > 1:
+ if i - j > 1:
# We need two lines to connect if distance > 1
- self._back_edge_line([], j, i, True, label + "-1 " + str((i,j)))
+ self._back_edge_line([], j, i, True,
+ label + "-1 " + str((i, j)))
collapse = False
- self._back_edge_line([j], -1, -1, collapse, label + "-2 " + str((i,j)))
+ self._back_edge_line([j], -1, -1, collapse,
+ label + "-2 " + str((i, j)))
return True
elif deps:
self._frontier.insert(i, deps)
return False
-
def _set_state(self, state, index, label=None):
if state not in states:
raise ValueError("Invalid graph state!")
@@ -231,7 +238,6 @@ class AsciiGraph(object):
self._out.write("%-20s" % (str(label) if label else ''))
self._out.write("%s" % self._frontier)
-
def _back_edge_line(self, prev_ends, end, start, collapse, label=None):
"""Write part of a backwards edge in the graph.
@@ -285,27 +291,26 @@ class AsciiGraph(object):
self._indent()
for p in prev_ends:
- advance(p, lambda: [("| ", self._pos)] )
- advance(p+1, lambda: [("|/", self._pos)] )
+ advance(p, lambda: [("| ", self._pos)])
+ advance(p + 1, lambda: [("|/", self._pos)])
if end >= 0:
- advance(end + 1, lambda: [("| ", self._pos)] )
- advance(start - 1, lambda: [("|", self._pos), ("_", end)] )
+ advance(end + 1, lambda: [("| ", self._pos)])
+ advance(start - 1, lambda: [("|", self._pos), ("_", end)])
else:
- advance(start - 1, lambda: [("| ", self._pos)] )
+ advance(start - 1, lambda: [("| ", self._pos)])
if start >= 0:
- advance(start, lambda: [("|", self._pos), ("/", end)] )
+ advance(start, lambda: [("|", self._pos), ("/", end)])
if collapse:
- advance(flen, lambda: [(" /", self._pos)] )
+ advance(flen, lambda: [(" /", self._pos)])
else:
- advance(flen, lambda: [("| ", self._pos)] )
+ advance(flen, lambda: [("| ", self._pos)])
self._set_state(BACK_EDGE, end, label)
self._out.write("\n")
-
def _node_line(self, index, name):
"""Writes a line with a node at index."""
self._indent()
@@ -314,14 +319,13 @@ class AsciiGraph(object):
self._out.write("%s " % self.node_character)
- for c in range(index+1, len(self._frontier)):
+ for c in range(index + 1, len(self._frontier)):
self._write_edge("| ", c)
self._out.write(" %s" % name)
self._set_state(NODE, index)
self._out.write("\n")
-
def _collapse_line(self, index):
"""Write a collapsing line after a node was added at index."""
self._indent()
@@ -333,36 +337,33 @@ class AsciiGraph(object):
self._set_state(COLLAPSE, index)
self._out.write("\n")
-
def _merge_right_line(self, index):
"""Edge at index is same as edge to right. Merge directly with '\'"""
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
- self._write_edge("\\", index+1)
- for c in range(index+1, len(self._frontier)):
- self._write_edge("| ", c )
+ self._write_edge("\\", index + 1)
+ for c in range(index + 1, len(self._frontier)):
+ self._write_edge("| ", c)
self._set_state(MERGE_RIGHT, index)
self._out.write("\n")
-
def _expand_right_line(self, index):
self._indent()
for c in range(index):
self._write_edge("| ", c)
self._write_edge("|", index)
- self._write_edge("\\", index+1)
+ self._write_edge("\\", index + 1)
- for c in range(index+2, len(self._frontier)):
+ for c in range(index + 2, len(self._frontier)):
self._write_edge(" \\", c)
self._set_state(EXPAND_RIGHT, index)
self._out.write("\n")
-
def write(self, spec, **kwargs):
"""Write out an ascii graph of the provided spec.
@@ -387,7 +388,7 @@ class AsciiGraph(object):
self._out = ColorStream(sys.stdout, color=color)
# We'll traverse the spec in topo order as we graph it.
- topo_order = topological_sort(spec, reverse=True)
+ topo_order = topological_sort(spec, reverse=True, deptype=self.deptype)
# Work on a copy to be nondestructive
spec = spec.copy()
@@ -396,7 +397,7 @@ class AsciiGraph(object):
# Colors associated with each node in the DAG.
# Edges are colored by the node they point to.
self._name_to_color = dict((name, self.colors[i % len(self.colors)])
- for i, name in enumerate(topo_order))
+ for i, name in enumerate(topo_order))
# Frontier tracks open edges of the graph as it's written out.
self._frontier = [[spec.name]]
@@ -405,7 +406,8 @@ class AsciiGraph(object):
i = find(self._frontier, lambda f: len(f) > 1)
if i >= 0:
- # Expand frontier until there are enough columns for all children.
+ # Expand frontier until there are enough columns for all
+ # children.
# Figure out how many back connections there are and
# sort them so we do them in order
@@ -420,29 +422,37 @@ class AsciiGraph(object):
if back:
back.sort()
prev_ends = []
+ collapse_l1 = False
for j, (b, d) in enumerate(back):
self._frontier[i].remove(d)
- if i-b > 1:
- self._back_edge_line(prev_ends, b, i, False, 'left-1')
+ if i - b > 1:
+ collapse_l1 = any(not e for e in self._frontier)
+ self._back_edge_line(
+ prev_ends, b, i, collapse_l1, 'left-1')
del prev_ends[:]
prev_ends.append(b)
# Check whether we did ALL the deps as back edges,
# in which case we're done.
- collapse = not self._frontier[i]
- if collapse:
+ pop = not self._frontier[i]
+ collapse_l2 = pop
+ if collapse_l1:
+ collapse_l2 = False
+ if pop:
self._frontier.pop(i)
- self._back_edge_line(prev_ends, -1, -1, collapse, 'left-2')
+ self._back_edge_line(
+ prev_ends, -1, -1, collapse_l2, 'left-2')
elif len(self._frontier[i]) > 1:
# Expand forward after doing all back connections
- if (i+1 < len(self._frontier) and len(self._frontier[i+1]) == 1
- and self._frontier[i+1][0] in self._frontier[i]):
+ if (i + 1 < len(self._frontier) and
+ len(self._frontier[i + 1]) == 1 and
+ self._frontier[i + 1][0] in self._frontier[i]):
# We need to connect to the element to the right.
# Keep lines straight by connecting directly and
# avoiding unnecessary expand/contract.
- name = self._frontier[i+1][0]
+ name = self._frontier[i + 1][0]
self._frontier[i].remove(name)
self._merge_right_line(i)
@@ -456,9 +466,8 @@ class AsciiGraph(object):
self._frontier.pop(i)
self._connect_deps(i, deps, "post-expand")
-
# Handle any remaining back edges to the right
- j = i+1
+ j = i + 1
while j < len(self._frontier):
deps = self._frontier.pop(j)
if not self._connect_deps(j, deps, "back-from-right"):
@@ -475,32 +484,28 @@ class AsciiGraph(object):
# Replace node with its dependencies
self._frontier.pop(i)
- if node.dependencies:
- deps = sorted((d for d in node.dependencies), reverse=True)
- self._connect_deps(i, deps, "new-deps") # anywhere.
+ deps = node.dependencies(self.deptype)
+ if deps:
+ deps = sorted((d.name for d in deps), reverse=True)
+ self._connect_deps(i, deps, "new-deps") # anywhere.
elif self._frontier:
self._collapse_line(i)
-def graph_ascii(spec, **kwargs):
- node_character = kwargs.get('node', 'o')
- out = kwargs.pop('out', None)
- debug = kwargs.pop('debug', False)
- indent = kwargs.pop('indent', 0)
- color = kwargs.pop('color', None)
- check_kwargs(kwargs, graph_ascii)
-
+def graph_ascii(spec, node='o', out=None, debug=False,
+ indent=0, color=None, deptype=None):
graph = AsciiGraph()
graph.debug = debug
graph.indent = indent
- graph.node_character = node_character
+ graph.node_character = node
+ if deptype:
+ graph.deptype = canonical_deptype(deptype)
graph.write(spec, color=color, out=out)
-
-def graph_dot(*specs, **kwargs):
+def graph_dot(specs, deptype=None, static=False, out=None):
"""Generate a graph in dot format of all provided specs.
Print out a dot formatted graph of all the dependencies between
@@ -509,42 +514,73 @@ def graph_dot(*specs, **kwargs):
spack graph --dot qt | dot -Tpdf > spack-graph.pdf
"""
- out = kwargs.pop('out', sys.stdout)
- check_kwargs(kwargs, graph_dot)
+ if out is None:
+ out = sys.stdout
+
+ if deptype is None:
+ deptype = alldeps
out.write('digraph G {\n')
- out.write(' label = "Spack Dependencies"\n')
out.write(' labelloc = "b"\n')
- out.write(' rankdir = "LR"\n')
+ out.write(' rankdir = "TB"\n')
out.write(' ranksep = "5"\n')
+ out.write('node[\n')
+ out.write(' fontname=Monaco,\n')
+ out.write(' penwidth=2,\n')
+ out.write(' fontsize=12,\n')
+ out.write(' margin=.1,\n')
+ out.write(' shape=box,\n')
+ out.write(' fillcolor=lightblue,\n')
+ out.write(' style="rounded,filled"]\n')
+
out.write('\n')
- def quote(string):
+ def q(string):
return '"%s"' % string
if not specs:
- specs = [p.name for p in spack.repo.all_packages()]
- else:
- roots = specs
- specs = set()
- for spec in roots:
- specs.update(Spec(s.name) for s in spec.normalized().traverse())
+ raise ValueError("Must provide specs ot graph_dot")
- deps = []
+ # Static graph includes anything a package COULD depend on.
+ if static:
+ names = set.union(*[s.package.possible_dependencies() for s in specs])
+ specs = [Spec(name) for name in names]
+
+ labeled = set()
+
+ def label(key, label):
+ if key not in labeled:
+ out.write(' "%s" [label="%s"]\n' % (key, label))
+ labeled.add(key)
+
+ deps = set()
for spec in specs:
- out.write(' %-30s [label="%s"]\n' % (quote(spec.name), spec.name))
+ if static:
+ out.write(' "%s" [label="%s"]\n' % (spec.name, spec.name))
+
+            # Skip virtual specs (we'll find out about them from concrete ones).
+ if spec.virtual:
+ continue
- # Skip virtual specs (we'll find out about them from concrete ones.
- if spec.virtual:
- continue
+ # Add edges for each depends_on in the package.
+ for dep_name, dep in spec.package.dependencies.iteritems():
+ deps.add((spec.name, dep_name))
+
+ # If the package provides something, add an edge for that.
+ for provider in set(s.name for s in spec.package.provided):
+ deps.add((provider, spec.name))
+
+ else:
+ def key_label(s):
+ return s.dag_hash(), "%s-%s" % (s.name, s.dag_hash(7))
- # Add edges for each depends_on in the package.
- for dep_name, dep in spec.package.dependencies.iteritems():
- deps.append((spec.name, dep_name))
+ for s in spec.traverse(deptype=deptype):
+ skey, slabel = key_label(s)
+ out.write(' "%s" [label="%s"]\n' % (skey, slabel))
- # If the package provides something, add an edge for that.
- for provider in set(s.name for s in spec.package.provided):
- deps.append((provider, spec.name))
+ for d in s.dependencies(deptype=deptype):
+ dkey, _ = key_label(d)
+ deps.add((skey, dkey))
out.write('\n')
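
A hedged sketch of the reworked graph entry points, assuming Spack is importable and that the example spec concretizes on the host ('mpileaks' is only an illustration):

    import sys

    from spack.spec import Spec
    from spack.graph import graph_ascii, graph_dot, topological_sort

    spec = Spec('mpileaks')
    spec.concretize()

    # ASCII DAG limited to link and run dependencies.
    graph_ascii(spec, deptype=('link', 'run'), out=sys.stdout)

    # graph_dot() now takes a list of specs plus explicit keyword arguments.
    graph_dot([spec], deptype=('link', 'run'), static=False, out=sys.stdout)

    # Topological order over the same dependency types.
    order = topological_sort(spec, reverse=True, deptype=('link', 'run'))
    print(order)
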
diff --git a/lib/spack/spack/hooks/__init__.py b/lib/spack/spack/hooks/__init__.py
index 902e488eca..6454a865b6 100644
--- a/lib/spack/spack/hooks/__init__.py
+++ b/lib/spack/spack/hooks/__init__.py
@@ -24,7 +24,7 @@
##############################################################################
"""This package contains modules with hooks for various stages in the
Spack install process. You can add modules here and they'll be
- executaed by package at various times during the package lifecycle.
+ executed by package at various times during the package lifecycle.
Each hook is just a function that takes a package as a parameter.
Hooks are not executed in any particular order.
@@ -41,9 +41,11 @@
features.
"""
import imp
-from llnl.util.lang import memoized, list_modules
-from llnl.util.filesystem import join_path
+
import spack
+from llnl.util.filesystem import join_path
+from llnl.util.lang import memoized, list_modules
+
@memoized
def all_hook_modules():
@@ -58,22 +60,25 @@ def all_hook_modules():
class HookRunner(object):
+
def __init__(self, hook_name):
self.hook_name = hook_name
- def __call__(self, pkg):
+ def __call__(self, *args, **kwargs):
for module in all_hook_modules():
if hasattr(module, self.hook_name):
hook = getattr(module, self.hook_name)
if hasattr(hook, '__call__'):
- hook(pkg)
+ hook(*args, **kwargs)
#
# Define some functions that can be called to fire off hooks.
#
-pre_install = HookRunner('pre_install')
-post_install = HookRunner('post_install')
+pre_run = HookRunner('pre_run')
+
+pre_install = HookRunner('pre_install')
+post_install = HookRunner('post_install')
-pre_uninstall = HookRunner('pre_uninstall')
+pre_uninstall = HookRunner('pre_uninstall')
post_uninstall = HookRunner('post_uninstall')
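
A sketch of what the more general HookRunner signature allows, in the form of a hypothetical hook module (the module name and print statements are illustrative):

    # lib/spack/spack/hooks/example_hook.py  (hypothetical)

    def pre_run():
        # Fired with no arguments before a Spack command runs,
        # via spack.hooks.pre_run().
        print('about to run a spack command')


    def post_install(pkg):
        # Still receives the package; HookRunner now forwards *args and
        # **kwargs, so hooks with different signatures can coexist.
        print('installed %s' % pkg.spec)
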
diff --git a/lib/spack/spack/hooks/case_consistency.py b/lib/spack/spack/hooks/case_consistency.py
new file mode 100644
index 0000000000..faf38f7ae3
--- /dev/null
+++ b/lib/spack/spack/hooks/case_consistency.py
@@ -0,0 +1,101 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from __future__ import absolute_import
+import os
+import re
+import platform
+
+from llnl.util.filesystem import *
+
+import spack
+from spack.util.executable import *
+
+
+def pre_run():
+ if platform.system() != "Darwin":
+ return
+
+ git_case_consistency_check(spack.repo.get_repo('builtin').packages_path)
+
+
+def git_case_consistency_check(path):
+ """Re-sync case of files in a directory with git.
+
+ On case-insensitive but case-preserving filesystems like Mac OS X,
+ Git doesn't properly rename files that only had their case changed.
+
+ This checks files in a directory against git and does a
+ case-restoring rename (actually two renames, e.g.::
+
+ name -> tmp -> NAME
+
+ We use this in Spack to ensure package directories are named
+ correctly.
+
+ TODO: this check can probably be removed once package names have been
+ TODO: lowercase for a long while.
+
+ """
+ with working_dir(path):
+ # Don't bother fixing case if Spack isn't in a git repository
+ git = which('git')
+ if not git:
+ return
+
+ try:
+ git_filenames = git('ls-tree', '--name-only', 'HEAD', output=str)
+ git_filenames = set(re.split(r'\s+', git_filenames.strip()))
+ except ProcessError:
+ return # Ignore errors calling git
+
+ lower_to_mixed = {}
+ for fn in git_filenames:
+ lower = fn.lower()
+ mixed = lower_to_mixed.setdefault(lower, [])
+ mixed.append(fn)
+
+ # Iterate through all actual files and make sure their names are
+ # the same as corresponding names in git
+ actual_filenames = os.listdir('.')
+ for actual in actual_filenames:
+ lower = actual.lower()
+
+ # not tracked by git
+ if lower not in lower_to_mixed:
+ continue
+
+ # Don't know what to do with multiple matches
+ if len(lower_to_mixed[lower]) != 1:
+ continue
+
+ # Skip if case is already correct
+ git_name = lower_to_mixed[lower][0]
+ if git_name == actual:
+ continue
+
+ # restore case with two renames
+ tmp_name = actual + '.spack.tmp'
+ os.rename(actual, tmp_name)
+ os.rename(tmp_name, git_name)
diff --git a/lib/spack/spack/hooks/extensions.py b/lib/spack/spack/hooks/extensions.py
index bcbd68dfa0..070b309a43 100644
--- a/lib/spack/spack/hooks/extensions.py
+++ b/lib/spack/spack/hooks/extensions.py
@@ -23,8 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import spack
-
def pre_uninstall(pkg):
assert(pkg.spec.concrete)
diff --git a/lib/spack/spack/hooks/licensing.py b/lib/spack/spack/hooks/licensing.py
index 0f63b0e05a..a99099749c 100644
--- a/lib/spack/spack/hooks/licensing.py
+++ b/lib/spack/spack/hooks/licensing.py
@@ -26,7 +26,7 @@ import os
import spack
import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
+from llnl.util.filesystem import join_path, mkdirp
def pre_install(pkg):
@@ -154,6 +154,14 @@ def symlink_license(pkg):
target = pkg.global_license_file
for filename in pkg.license_files:
link_name = join_path(pkg.prefix, filename)
+ license_dir = os.path.dirname(link_name)
+ if not os.path.exists(license_dir):
+ mkdirp(license_dir)
+
+ # If example file already exists, overwrite it with a symlink
+ if os.path.exists(link_name):
+ os.remove(link_name)
+
if os.path.exists(target):
os.symlink(target, link_name)
tty.msg("Added local symlink %s to global license file" %
diff --git a/lib/spack/spack/hooks/module_file_generation.py b/lib/spack/spack/hooks/module_file_generation.py
new file mode 100644
index 0000000000..445cea4e91
--- /dev/null
+++ b/lib/spack/spack/hooks/module_file_generation.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import spack.modules
+
+
+def post_install(pkg):
+ for item, cls in spack.modules.module_types.iteritems():
+ generator = cls(pkg.spec)
+ generator.write()
+
+
+def post_uninstall(pkg):
+ for item, cls in spack.modules.module_types.iteritems():
+ generator = cls(pkg.spec)
+ generator.remove()
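
For a single spec this hook boils down to roughly the following, assuming Spack is importable; 'zlib' is an arbitrary example package:

    import spack.modules
    from spack.spec import Spec

    spec = Spec('zlib')
    spec.concretize()

    # Regenerate a module file for every enabled module type, mirroring
    # what post_install() does after an installation.
    for name, cls in spack.modules.module_types.items():
        cls(spec).write(overwrite=True)
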
diff --git a/lib/spack/spack/hooks/sbang.py b/lib/spack/spack/hooks/sbang.py
index 83d67ea225..6f9736a018 100644
--- a/lib/spack/spack/hooks/sbang.py
+++ b/lib/spack/spack/hooks/sbang.py
@@ -23,8 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import stat
+import re
-from llnl.util.filesystem import *
import llnl.util.tty as tty
import spack
@@ -34,6 +35,7 @@ import spack.modules
# here, as it is the shortest I could find on a modern OS.
shebang_limit = 127
+
def shebang_too_long(path):
"""Detects whether a file has a shebang line that is too long."""
with open(path, 'r') as script:
@@ -57,21 +59,32 @@ def filter_shebang(path):
if original.startswith(new_sbang_line):
return
- backup = path + ".shebang.bak"
- os.rename(path, backup)
+ # Use --! instead of #! on second line for lua.
+ if re.search(r'^#!(/[^/]*)*lua\b', original):
+ original = re.sub(r'^#', '--', original)
+
+ # Change non-writable files to be writable if needed.
+ saved_mode = None
+ if not os.access(path, os.W_OK):
+ st = os.stat(path)
+ saved_mode = st.st_mode
+ os.chmod(path, saved_mode | stat.S_IWRITE)
with open(path, 'w') as new_file:
new_file.write(new_sbang_line)
new_file.write(original)
- copy_mode(backup, path)
- unset_executable_mode(backup)
+ # Restore original permissions.
+ if saved_mode is not None:
+ os.chmod(path, saved_mode)
- tty.warn("Patched overly long shebang in %s" % path)
+ tty.warn("Patched overlong shebang in %s" % path)
-def filter_shebangs_in_directory(directory):
- for file in os.listdir(directory):
+def filter_shebangs_in_directory(directory, filenames=None):
+ if filenames is None:
+ filenames = os.listdir(directory)
+ for file in filenames:
path = os.path.join(directory, file)
# only handle files
@@ -93,6 +106,6 @@ def post_install(pkg):
"""This hook edits scripts so that they call /bin/bash
$spack_prefix/bin/sbang instead of something longer than the
shebang limit."""
- if not os.path.isdir(pkg.prefix.bin):
- return
- filter_shebangs_in_directory(pkg.prefix.bin)
+
+ for directory, _, filenames in os.walk(pkg.prefix):
+ filter_shebangs_in_directory(directory, filenames)
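
A brief sketch of the two helpers above applied to one file, assuming Spack is importable; the script path is a placeholder:

    from spack.hooks.sbang import shebang_too_long, filter_shebang

    script = '/tmp/prefix/bin/example-script'   # illustrative path

    # post_install() now walks the whole prefix; for a single file the
    # same check-and-patch sequence looks like this.
    if shebang_too_long(script):
        # Rewrites the file in place: a short sbang interpreter line is
        # written first and the original, overlong shebang follows it.
        filter_shebang(script)
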
diff --git a/lib/spack/spack/hooks/yaml_version_check.py b/lib/spack/spack/hooks/yaml_version_check.py
new file mode 100644
index 0000000000..a4b38198bc
--- /dev/null
+++ b/lib/spack/spack/hooks/yaml_version_check.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Yaml Version Check is a module for ensuring that config file
+formats are compatible with the current version of Spack."""
+import os.path
+import os
+import llnl.util.tty as tty
+import spack.util.spack_yaml as syaml
+import spack.config
+
+
+def pre_run():
+ check_compiler_yaml_version()
+
+
+def check_compiler_yaml_version():
+ config_scopes = spack.config.config_scopes
+ for scope in config_scopes.values():
+ file_name = os.path.join(scope.path, 'compilers.yaml')
+ data = None
+ if os.path.isfile(file_name):
+ with open(file_name) as f:
+ data = syaml.load(f)
+
+ if data:
+ compilers = data['compilers']
+ if len(compilers) > 0:
+ if (not isinstance(compilers, list) or
+ 'operating_system' not in compilers[0]['compiler']):
+ new_file = os.path.join(scope.path, '_old_compilers.yaml')
+                tty.warn('%s is in an out-of-date compilers format. '
+                         'Moved it to %s. Spack will automatically '
+                         'generate a new compilers config file.'
+                         % (file_name, new_file))
+ os.rename(file_name, new_file)
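
The shape this check accepts as current, written out as the loaded YAML data; everything other than the list structure and the operating_system key is illustrative:

    # compilers.yaml in the new format loads to a list of entries whose
    # 'compiler' mapping carries an 'operating_system' key.
    data = {
        'compilers': [
            {'compiler': {
                'spec': 'gcc@4.9.3',               # illustrative fields
                'operating_system': 'ubuntu14',
                'paths': {'cc': '/usr/bin/gcc'},
            }},
        ]
    }

    compilers = data['compilers']
    assert isinstance(compilers, list)
    assert 'operating_system' in compilers[0]['compiler']
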
diff --git a/lib/spack/spack/mirror.py b/lib/spack/spack/mirror.py
index 0bbcfba6b4..aef5e2e8ee 100644
--- a/lib/spack/spack/mirror.py
+++ b/lib/spack/spack/mirror.py
@@ -40,36 +40,45 @@ import spack.error
import spack.url as url
import spack.fetch_strategy as fs
from spack.spec import Spec
-from spack.stage import Stage
from spack.version import *
-from spack.util.compression import extension, allowed_archive
+from spack.util.compression import allowed_archive
-def mirror_archive_filename(spec, fetcher):
+def mirror_archive_filename(spec, fetcher, resourceId=None):
"""Get the name of the spec's archive in the mirror."""
if not spec.version.concrete:
raise ValueError("mirror.path requires spec with concrete version.")
if isinstance(fetcher, fs.URLFetchStrategy):
if fetcher.expand_archive:
- # If we fetch this version with a URLFetchStrategy, use URL's archive type
- ext = url.downloaded_file_extension(fetcher.url)
+ # If we fetch with a URLFetchStrategy, use URL's archive type
+ ext = url.determine_url_file_extension(fetcher.url)
+ ext = ext or spec.package.versions[spec.package.version].get(
+ 'extension', None)
+ ext = ext.lstrip('.')
+ if not ext:
+ raise MirrorError(
+ "%s version does not specify an extension" % spec.name +
+ " and could not parse extension from %s" % fetcher.url)
else:
- # If the archive shouldn't be expanded, don't check for its extension.
+ # If the archive shouldn't be expanded, don't check extension.
ext = None
else:
# Otherwise we'll make a .tar.gz ourselves
ext = 'tar.gz'
- filename = "%s-%s" % (spec.package.name, spec.version)
- if ext:
- filename += ".%s" % ext
+ if resourceId:
+ filename = "%s-%s" % (resourceId, spec.version) + ".%s" % ext
+ else:
+ filename = "%s-%s" % (spec.package.name, spec.version) + ".%s" % ext
+
return filename
-def mirror_archive_path(spec, fetcher):
+def mirror_archive_path(spec, fetcher, resourceId=None):
"""Get the relative path to the spec's archive within a mirror."""
- return join_path(spec.name, mirror_archive_filename(spec, fetcher))
+ return join_path(
+ spec.name, mirror_archive_filename(spec, fetcher, resourceId))
def get_matching_versions(specs, **kwargs):
@@ -95,6 +104,9 @@ def get_matching_versions(specs, **kwargs):
s = Spec(pkg.name)
s.versions = VersionList([v])
s.variants = spec.variants.copy()
+ # This is needed to avoid hanging references during the
+ # concretization phase
+ s.variants.spec = s
matching_spec.append(s)
if not matching_spec:
@@ -106,7 +118,9 @@ def get_matching_versions(specs, **kwargs):
def suggest_archive_basename(resource):
"""
- Return a tentative basename for an archive. Raise an exception if the name is among the allowed archive types.
+ Return a tentative basename for an archive.
+
+ Raises an exception if the name is not an allowed archive type.
:param fetcher:
:return:
@@ -119,27 +133,28 @@ def suggest_archive_basename(resource):
def create(path, specs, **kwargs):
"""Create a directory to be used as a spack mirror, and fill it with
- package archives.
-
- Arguments:
- path Path to create a mirror directory hierarchy in.
- specs Any package versions matching these specs will be added
- to the mirror.
-
- Keyword args:
- no_checksum: If True, do not checkpoint when fetching (default False)
- num_versions: Max number of versions to fetch per spec,
- if spec is ambiguous (default is 0 for all of them)
-
- Return Value:
- Returns a tuple of lists: (present, mirrored, error)
- * present: Package specs that were already present.
- * mirrored: Package specs that were successfully mirrored.
- * error: Package specs that failed to mirror due to some error.
-
- This routine iterates through all known package versions, and
- it creates specs for those versions. If the version satisfies any spec
- in the specs list, it is downloaded and added to the mirror.
+ package archives.
+
+ Arguments:
+ path: Path to create a mirror directory hierarchy in.
+ specs: Any package versions matching these specs will be added \
+ to the mirror.
+
+ Keyword args:
+ no_checksum: If True, do not checkpoint when fetching (default False)
+ num_versions: Max number of versions to fetch per spec, \
+ if spec is ambiguous (default is 0 for all of them)
+
+ Return Value:
+ Returns a tuple of lists: (present, mirrored, error)
+
+ * present: Package specs that were already present.
+ * mirrored: Package specs that were successfully mirrored.
+ * error: Package specs that failed to mirror due to some error.
+
+ This routine iterates through all known package versions, and
+ it creates specs for those versions. If the version satisfies any spec
+ in the specs list, it is downloaded and added to the mirror.
"""
# Make sure nothing is in the way.
if os.path.isfile(path):
@@ -170,7 +185,7 @@ def create(path, specs, **kwargs):
'error': []
}
- # Iterate through packages and download all the safe tarballs for each of them
+ # Iterate through packages and download all safe tarballs for each
for spec in version_specs:
add_single_spec(spec, mirror_root, categories, **kwargs)
@@ -190,12 +205,16 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
fetcher = stage.fetcher
if ii == 0:
# create a subdirectory for the current package@version
- archive_path = os.path.abspath(join_path(mirror_root, mirror_archive_path(spec, fetcher)))
+ archive_path = os.path.abspath(join_path(
+ mirror_root, mirror_archive_path(spec, fetcher)))
name = spec.format("$_$@")
else:
resource = stage.resource
- archive_path = join_path(subdir, suggest_archive_basename(resource))
- name = "{resource} ({pkg}).".format(resource=resource.name, pkg=spec.format("$_$@"))
+ archive_path = os.path.abspath(join_path(
+ mirror_root,
+ mirror_archive_path(spec, fetcher, resource.name)))
+ name = "{resource} ({pkg}).".format(
+ resource=resource.name, pkg=spec.format("$_$@"))
subdir = os.path.dirname(archive_path)
mkdirp(subdir)
@@ -217,15 +236,18 @@ def add_single_spec(spec, mirror_root, categories, **kwargs):
categories['present'].append(spec)
else:
categories['mirrored'].append(spec)
+
except Exception as e:
if spack.debug:
sys.excepthook(*sys.exc_info())
else:
- tty.warn("Error while fetching %s" % spec.format('$_$@'), e.message)
+ tty.warn("Error while fetching %s"
+ % spec.format('$_$@'), e.message)
categories['error'].append(spec)
class MirrorError(spack.error.SpackError):
"""Superclass of all mirror-creation related errors."""
+
def __init__(self, msg, long_msg=None):
super(MirrorError, self).__init__(msg, long_msg)
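
A hedged sketch of driving the mirror code directly, assuming Spack is importable and network access is available; the target path and spec are examples, and the return value follows the create() docstring above:

    import spack.mirror as mirror
    from spack.spec import Spec

    # Mirror one package; create() finds matching versions, fetches their
    # archives, and lays them out under <path>/<package-name>/.
    present, mirrored, error = mirror.create('/tmp/example-mirror',
                                             [Spec('zlib')])

    print('already present: %s' % (present,))
    print('newly mirrored:  %s' % (mirrored,))
    print('failed:          %s' % (error,))
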
diff --git a/lib/spack/spack/modules.py b/lib/spack/spack/modules.py
index d2b819e80a..5e2a840e14 100644
--- a/lib/spack/spack/modules.py
+++ b/lib/spack/spack/modules.py
@@ -40,6 +40,7 @@ module file.
"""
import copy
import datetime
+import itertools
import os
import os.path
import re
@@ -47,19 +48,27 @@ import string
import textwrap
import llnl.util.tty as tty
+from llnl.util.filesystem import join_path, mkdirp
+
import spack
+import spack.compilers # Needed by LmodModules
import spack.config
-from llnl.util.filesystem import join_path, mkdirp
+from spack.util.path import canonicalize_path
from spack.build_environment import parent_class_modules
from spack.build_environment import set_module_variables_for_package
from spack.environment import *
__all__ = ['EnvModule', 'Dotkit', 'TclModule']
-# Registry of all types of modules. Entries created by EnvModule's metaclass
+"""Registry of all types of modules. Entries created by EnvModule's
+ metaclass."""
module_types = {}
-CONFIGURATION = spack.config.get_config('modules')
+"""Module install roots are in config.yaml."""
+_roots = spack.config.get_config('config').get('module_roots', {})
+
+"""Specifics about modules are in modules.yaml"""
+_module_config = spack.config.get_config('modules')
def print_help():
@@ -89,7 +98,7 @@ def inspect_path(prefix):
"""
env = EnvironmentModifications()
# Inspect the prefix to check for the existence of common directories
- prefix_inspections = CONFIGURATION.get('prefix_inspections', {})
+ prefix_inspections = _module_config.get('prefix_inspections', {})
for relative_path, variables in prefix_inspections.items():
expected = join_path(prefix, relative_path)
if os.path.isdir(expected):
@@ -120,7 +129,7 @@ def dependencies(spec, request='all'):
return []
if request == 'direct':
- return [xx for _, xx in spec.dependencies.items()]
+ return spec.dependencies(deptype=('link', 'run'))
# FIXME : during module file creation nodes seem to be visited multiple
# FIXME : times even if cover='nodes' is given. This work around permits
@@ -128,14 +137,13 @@ def dependencies(spec, request='all'):
# FIXME : step among nodes that refer to the same package?
seen = set()
seen_add = seen.add
- l = [xx
- for xx in sorted(
- spec.traverse(order='post',
- depth=True,
- cover='nodes',
- root=False),
- reverse=True)]
- return [xx for ii, xx in l if not (xx in seen or seen_add(xx))]
+ l = sorted(
+ spec.traverse(order='post',
+ cover='nodes',
+ deptype=('link', 'run'),
+ root=False),
+ reverse=True)
+ return [x for x in l if not (x in seen or seen_add(x))]
def update_dictionary_extending_lists(target, update):
@@ -166,7 +174,7 @@ def parse_config_options(module_generator):
module file
"""
# Get the configuration for this kind of generator
- module_configuration = copy.deepcopy(CONFIGURATION.get(
+ module_configuration = copy.deepcopy(_module_config.get(
module_generator.name, {}))
#####
@@ -188,6 +196,8 @@ def parse_config_options(module_generator):
#####
# Automatic loading loads
+ module_file_actions['hash_length'] = module_configuration.get(
+ 'hash_length', 7)
module_file_actions['autoload'] = dependencies(
module_generator.spec, module_file_actions.get('autoload', 'none'))
# Prerequisites
@@ -232,14 +242,19 @@ def filter_blacklisted(specs, module_name):
yield x
+def format_env_var_name(name):
+ return name.replace('-', '_').upper()
+
+
class EnvModule(object):
name = 'env_module'
formats = {}
class __metaclass__(type):
+
def __init__(cls, name, bases, dict):
type.__init__(cls, name, bases, dict)
- if cls.name != 'env_module' and cls.name in CONFIGURATION[
+ if cls.name != 'env_module' and cls.name in _module_config[
'enable']:
module_types[cls.name] = cls
@@ -262,35 +277,35 @@ class EnvModule(object):
@property
def naming_scheme(self):
try:
- naming_scheme = CONFIGURATION[self.name]['naming_scheme']
+ naming_scheme = _module_config[self.name]['naming_scheme']
except KeyError:
naming_scheme = self.default_naming_format
return naming_scheme
@property
- def tokens(self):
- tokens = {
- 'name': self.spec.name,
- 'version': self.spec.version,
- 'compiler': self.spec.compiler
- }
- return tokens
-
- @property
def use_name(self):
"""
Subclasses should implement this to return the name the module command
uses to refer to the package.
"""
- naming_tokens = self.tokens
- naming_scheme = self.naming_scheme
- name = naming_scheme.format(**naming_tokens)
- name += '-' + self.spec.dag_hash(
- ) # Always append the hash to make the module file unique
+ name = self.spec.format(self.naming_scheme)
# Not everybody is working on linux...
parts = name.split('/')
name = join_path(*parts)
- return name
+ # Add optional suffixes based on constraints
+ path_elements = [name] + self._get_suffixes()
+ return '-'.join(path_elements)
+
+ def _get_suffixes(self):
+ configuration, _ = parse_config_options(self)
+ suffixes = []
+ for constraint, suffix in configuration.get('suffixes', {}).items():
+ if constraint in self.spec:
+ suffixes.append(suffix)
+ hash_length = configuration.get('hash_length', 7)
+ if hash_length != 0:
+ suffixes.append(self.spec.dag_hash(length=hash_length))
+ return suffixes
@property
def category(self):
@@ -305,7 +320,7 @@ class EnvModule(object):
@property
def blacklisted(self):
- configuration = CONFIGURATION.get(self.name, {})
+ configuration = _module_config.get(self.name, {})
whitelist_matches = [x
for x in configuration.get('whitelist', [])
if self.spec.satisfies(x)]
@@ -331,7 +346,7 @@ class EnvModule(object):
return False
- def write(self):
+ def write(self, overwrite=False):
"""
Writes out a module file for this object.
@@ -364,6 +379,7 @@ class EnvModule(object):
for mod in modules:
set_module_variables_for_package(package, mod)
set_module_variables_for_package(package, package.module)
+ package.setup_environment(spack_env, env)
package.setup_dependent_package(self.pkg.module, self.spec)
package.setup_dependent_environment(spack_env, env, self.spec)
@@ -381,6 +397,8 @@ class EnvModule(object):
for x in filter_blacklisted(
module_configuration.pop('autoload', []), self.name):
module_file_content += self.autoload(x)
+ for x in module_configuration.pop('load', []):
+ module_file_content += self.autoload(x)
for x in filter_blacklisted(
module_configuration.pop('prerequisites', []), self.name):
module_file_content += self.prerequisite(x)
@@ -390,6 +408,15 @@ class EnvModule(object):
for line in self.module_specific_content(module_configuration):
module_file_content += line
+ # Print a warning in case I am accidentally overwriting
+ # a module file that is already there (name clash)
+ if not overwrite and os.path.exists(self.file_name):
+ message = 'Module file already exists : skipping creation\n'
+ message += 'file : {0.file_name}\n'
+ message += 'spec : {0.spec}'
+ tty.warn(message.format(self))
+ return
+
# Dump to file
with open(self.file_name, 'w') as f:
f.write(module_file_content)
@@ -402,8 +429,12 @@ class EnvModule(object):
return tuple()
def autoload(self, spec):
- m = type(self)(spec)
- return self.autoload_format.format(module_file=m.use_name)
+ if not isinstance(spec, str):
+ m = type(self)(spec)
+ module_file = m.use_name
+ else:
+ module_file = spec
+ return self.autoload_format.format(module_file=module_file)
def prerequisite(self, spec):
m = type(self)(spec)
@@ -411,11 +442,18 @@ class EnvModule(object):
def process_environment_command(self, env):
for command in env:
+ # Token expansion from configuration file
+ name = format_env_var_name(
+ self.spec.format(command.args.get('name', '')))
+ value = self.spec.format(str(command.args.get('value', '')))
+ command.update_args(name=name, value=value)
+            # Format the line in the module file
try:
yield self.environment_modifications_formats[type(
command)].format(**command.args)
except KeyError:
- message = 'Cannot handle command of type {command} : skipping request' # NOQA: ignore=E501
+ message = ('Cannot handle command of type {command}: '
+ 'skipping request')
details = '{context} at {filename}:{lineno}'
tty.warn(message.format(command=type(command)))
tty.warn(details.format(**command.args))
@@ -441,20 +479,23 @@ class EnvModule(object):
class Dotkit(EnvModule):
name = 'dotkit'
- path = join_path(spack.share_path, "dotkit")
+ path = canonicalize_path(
+ _roots.get(name, join_path(spack.share_path, name)))
environment_modifications_formats = {
PrependPath: 'dk_alter {name} {value}\n',
+ RemovePath: 'dk_unalter {name} {value}\n',
SetEnv: 'dk_setenv {name} {value}\n'
}
autoload_format = 'dk_op {module_file}\n'
- default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
+ default_naming_format = \
+ '${PACKAGE}-${VERSION}-${COMPILERNAME}-${COMPILERVER}'
@property
def file_name(self):
- return join_path(Dotkit.path, self.spec.architecture,
+ return join_path(self.path, self.spec.architecture,
'%s.dk' % self.use_name)
@property
@@ -476,21 +517,15 @@ class Dotkit(EnvModule):
def prerequisite(self, spec):
tty.warn('prerequisites: not supported by dotkit module files')
- tty.warn('\tYou may want to check ~/.spack/modules.yaml')
+ tty.warn('\tYou may want to check %s/modules.yaml'
+ % spack.user_config_path)
return ''
class TclModule(EnvModule):
name = 'tcl'
- path = join_path(spack.share_path, "modules")
-
- environment_modifications_formats = {
- PrependPath: 'prepend-path --delim "{delim}" {name} \"{value}\"\n',
- AppendPath: 'append-path --delim "{delim}" {name} \"{value}\"\n',
- RemovePath: 'remove-path --delim "{delim}" {name} \"{value}\"\n',
- SetEnv: 'setenv {name} \"{value}\"\n',
- UnsetEnv: 'unsetenv {name}\n'
- }
+ path = canonicalize_path(
+ _roots.get(name, join_path(spack.share_path, 'modules')))
autoload_format = ('if ![ is-loaded {module_file} ] {{\n'
' puts stderr "Autoloading {module_file}"\n'
@@ -499,21 +534,24 @@ class TclModule(EnvModule):
prerequisite_format = 'prereq {module_file}\n'
- default_naming_format = '{name}-{version}-{compiler.name}-{compiler.version}' # NOQA: ignore=E501
+ default_naming_format = \
+ '${PACKAGE}-${VERSION}-${COMPILERNAME}-${COMPILERVER}'
@property
def file_name(self):
- return join_path(TclModule.path, self.spec.architecture, self.use_name)
+ return join_path(self.path, self.spec.architecture, self.use_name)
@property
def header(self):
timestamp = datetime.datetime.now()
# TCL Modulefile header
- header = '#%Module1.0\n'
- header += '## Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
- header += '##\n'
- header += '## %s\n' % self.spec.short_spec
- header += '##\n'
+ header = """\
+#%%Module1.0
+## Module file created by spack (https://github.com/LLNL/spack) on %s
+##
+## %s
+##
+""" % (timestamp, self.spec.short_spec)
# TODO : category ?
# Short description
@@ -528,8 +566,46 @@ class TclModule(EnvModule):
header += '}\n\n'
return header
+ def process_environment_command(self, env):
+ environment_modifications_formats_colon = {
+ PrependPath: 'prepend-path {name} \"{value}\"\n',
+ AppendPath: 'append-path {name} \"{value}\"\n',
+ RemovePath: 'remove-path {name} \"{value}\"\n',
+ SetEnv: 'setenv {name} \"{value}\"\n',
+ UnsetEnv: 'unsetenv {name}\n'
+ }
+ environment_modifications_formats_general = {
+ PrependPath:
+ 'prepend-path --delim "{separator}" {name} \"{value}\"\n',
+ AppendPath:
+ 'append-path --delim "{separator}" {name} \"{value}\"\n',
+ RemovePath:
+ 'remove-path --delim "{separator}" {name} \"{value}\"\n',
+ SetEnv: 'setenv {name} \"{value}\"\n',
+ UnsetEnv: 'unsetenv {name}\n'
+ }
+ for command in env:
+ # Token expansion from configuration file
+ name = format_env_var_name(
+ self.spec.format(command.args.get('name', '')))
+ value = self.spec.format(str(command.args.get('value', '')))
+ command.update_args(name=name, value=value)
+            # Format the line in the module file
+ try:
+ if command.args.get('separator', ':') == ':':
+ yield environment_modifications_formats_colon[type(
+ command)].format(**command.args)
+ else:
+ yield environment_modifications_formats_general[type(
+ command)].format(**command.args)
+ except KeyError:
+ message = ('Cannot handle command of type {command}: '
+ 'skipping request')
+ details = '{context} at {filename}:{lineno}'
+ tty.warn(message.format(command=type(command)))
+ tty.warn(details.format(**command.args))
+
def module_specific_content(self, configuration):
- naming_tokens = self.tokens
# Conflict
conflict_format = configuration.get('conflict', [])
f = string.Formatter()
@@ -540,13 +616,259 @@ class TclModule(EnvModule):
for naming_dir, conflict_dir in zip(
self.naming_scheme.split('/'), item.split('/')):
if naming_dir != conflict_dir:
- message = 'conflict scheme does not match naming scheme [{spec}]\n\n' # NOQA: ignore=E501
+ message = 'conflict scheme does not match naming '
+ message += 'scheme [{spec}]\n\n'
message += 'naming scheme : "{nformat}"\n'
message += 'conflict scheme : "{cformat}"\n\n'
- message += '** You may want to check your `modules.yaml` configuration file **\n' # NOQA: ignore=E501
+ message += '** You may want to check your '
+ message += '`modules.yaml` configuration file **\n'
tty.error(message.format(spec=self.spec,
nformat=self.naming_scheme,
cformat=item))
raise SystemExit('Module generation aborted.')
- line = line.format(**naming_tokens)
+ line = self.spec.format(line)
yield line
+
+# To construct an arbitrary hierarchy of module files:
+# 1. Parse the configuration file and check that all the items in
+# hierarchical_scheme are indeed virtual packages
+# This needs to be done only once at start-up
+# 2. Order the stack as `hierarchical_scheme + ['mpi', 'compiler']`
+# 3. Check which of the services are provided by the package
+# -> may be more than one
+# 4. Check which of the services are needed by the package
+# -> this determines where to write the module file
+# 5. For each combination of services in which we have at least one provider
+# here add the appropriate conditional MODULEPATH modifications
+
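A small standalone sketch of steps 2 and 5 above, assuming a hypothetical modules.yaml hierarchy of ['lapack'] on top of the implicit mpi and compiler tokens; it mirrors the combination filtering done by _hierarchy_token_combinations() below:

    import itertools

    # Hypothetical hierarchy: one extra virtual ('lapack') from the
    # configuration, plus the implicit 'mpi' and 'compiler' tokens.
    hierarchy_tokens = ['lapack', 'mpi', 'compiler']

    def token_combinations(tokens):
        # Yield every subset of the hierarchy that contains 'compiler';
        # these are the spots where conditional MODULEPATH entries may live.
        for ii in range(len(tokens) + 1):
            for item in itertools.combinations(tokens, ii):
                if 'compiler' in item:
                    yield item

    print(list(token_combinations(hierarchy_tokens)))
    # [('compiler',), ('lapack', 'compiler'), ('mpi', 'compiler'),
    #  ('lapack', 'mpi', 'compiler')]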
+
+class LmodModule(EnvModule):
+ name = 'lmod'
+ path = canonicalize_path(
+ _roots.get(name, join_path(spack.share_path, name)))
+
+ environment_modifications_formats = {
+ PrependPath: 'prepend_path("{name}", "{value}")\n',
+ AppendPath: 'append_path("{name}", "{value}")\n',
+ RemovePath: 'remove_path("{name}", "{value}")\n',
+ SetEnv: 'setenv("{name}", "{value}")\n',
+ UnsetEnv: 'unsetenv("{name}")\n'
+ }
+
+ autoload_format = ('if not isloaded("{module_file}") then\n'
+ ' LmodMessage("Autoloading {module_file}")\n'
+ ' load("{module_file}")\n'
+ 'end\n\n')
+
+ prerequisite_format = 'prereq("{module_file}")\n'
+
+ family_format = 'family("{family}")\n'
+
+ path_part_without_hash = join_path('{token.name}', '{token.version}')
+
+ # TODO : Check that extra tokens specified in configuration file
+ # TODO : are actually virtual dependencies
+ configuration = _module_config.get('lmod', {})
+ hierarchy_tokens = configuration.get('hierarchical_scheme', [])
+ hierarchy_tokens = hierarchy_tokens + ['mpi', 'compiler']
+
+ def __init__(self, spec=None):
+ super(LmodModule, self).__init__(spec)
+
+ self.configuration = _module_config.get('lmod', {})
+ hierarchy_tokens = self.configuration.get('hierarchical_scheme', [])
+ # TODO : Check that the extra hierarchy tokens specified in the
+ # TODO : configuration file are actually virtual dependencies
+ self.hierarchy_tokens = hierarchy_tokens + ['mpi', 'compiler']
+
+ # Sets the root directory for this architecture
+ self.modules_root = join_path(LmodModule.path, self.spec.architecture)
+
+ # Retrieve core compilers
+ self.core_compilers = self.configuration.get('core_compilers', [])
+ # Keep track of the requirements that this package has in terms
+ # of virtual packages that participate in the hierarchical structure
+ self.requires = {'compiler': self.spec.compiler}
+ # For each virtual dependency in the hierarchy
+ for x in self.hierarchy_tokens:
+ if x in self.spec and not self.spec.package.provides(
+ x): # if I depend on it
+ self.requires[x] = self.spec[x] # record the actual provider
+ # Check which services I need (this determines where the
+ # module file will be written)
+ self.substitutions = {}
+ self.substitutions.update(self.requires)
+ # TODO : complete substitutions
+ # Check which services I provide to others
+ self.provides = {}
+ # If it is in the list of supported compiler families -> compiler
+ if self.spec.name in spack.compilers.supported_compilers():
+ self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+ # Special case for llvm
+ if self.spec.name == 'llvm':
+ self.provides['compiler'] = spack.spec.CompilerSpec(str(self.spec))
+ self.provides['compiler'].name = 'clang'
+
+ for x in self.hierarchy_tokens:
+ if self.spec.package.provides(x):
+ self.provides[x] = self.spec[x]
+
+ def _hierarchy_token_combinations(self):
+ """
+ Yields all the relevant combinations that could appear in the hierarchy
+ """
+ for ii in range(len(self.hierarchy_tokens) + 1):
+ for item in itertools.combinations(self.hierarchy_tokens, ii):
+ if 'compiler' in item:
+ yield item
+
+ def _hierarchy_to_be_provided(self):
+ """
+ Filters a list of hierarchy tokens and yields only the ones that we
+ need to provide
+ """
+ for item in self._hierarchy_token_combinations():
+ if any(x in self.provides for x in item):
+ yield item
+
+ def token_to_path(self, name, value):
+ # If we are dealing with a core compiler, return 'Core'
+ if name == 'compiler' and str(value) in self.core_compilers:
+ return 'Core'
+ # CompilerSpec does not have a hash
+ if name == 'compiler':
+ return self.path_part_without_hash.format(token=value)
+ # In this case the hierarchy token refers to a virtual provider
+ path = self.path_part_without_hash.format(token=value)
+ path = '-'.join([path, value.dag_hash(length=7)])
+ return path
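To illustrate the three cases handled by token_to_path(), here is a minimal stand-in sketch with hypothetical values; it uses a namedtuple instead of the real Spec/CompilerSpec classes and a fake 7-character hash:

    from collections import namedtuple

    # Hypothetical stand-in for a spec or compiler token: only the two
    # attributes the format string above uses, plus a fake short hash.
    Token = namedtuple('Token', ['name', 'version'])

    def sketch_token_to_path(name, value, core_compilers, dag_hash7=None):
        # Core compilers collapse to the special 'Core' directory.
        if name == 'compiler' and \
                '{0}@{1}'.format(value.name, value.version) in core_compilers:
            return 'Core'
        part = '{0}/{1}'.format(value.name, value.version)
        # Compilers carry no hash; virtual providers get a short hash suffix.
        if name == 'compiler':
            return part
        return '-'.join([part, dag_hash7])

    core = ['gcc@4.8']
    print(sketch_token_to_path('compiler', Token('gcc', '4.8'), core))
    # Core
    print(sketch_token_to_path('compiler', Token('gcc', '6.2.0'), core))
    # gcc/6.2.0
    print(sketch_token_to_path('mpi', Token('openmpi', '2.0.1'), core, 'abcdefg'))
    # openmpi/2.0.1-abcdefg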
+
+ @property
+ def file_name(self):
+ parts = [self.token_to_path(x, self.requires[x])
+ for x in self.hierarchy_tokens if x in self.requires]
+ hierarchy_name = join_path(*parts)
+ fullname = join_path(self.modules_root, hierarchy_name,
+ self.use_name + '.lua')
+ return fullname
+
+ @property
+ def use_name(self):
+ path_elements = [self.spec.format("${PACKAGE}/${VERSION}")]
+ # The remaining elements are filename suffixes
+ path_elements.extend(self._get_suffixes())
+ return '-'.join(path_elements)
+
+ def modulepath_modifications(self):
+ # What is available is what we require plus what we provide
+ entry = ''
+ available = {}
+ available.update(self.requires)
+ available.update(self.provides)
+ available_parts = [self.token_to_path(x, available[x])
+ for x in self.hierarchy_tokens if x in available]
+ # Missing parts
+ missing = [x for x in self.hierarchy_tokens if x not in available]
+ # Direct path we provide on top of compilers
+ modulepath = join_path(self.modules_root, *available_parts)
+ env = EnvironmentModifications()
+ env.prepend_path('MODULEPATH', modulepath)
+ for line in self.process_environment_command(env):
+ entry += line
+
+ def local_variable(x):
+ lower, upper = x.lower(), x.upper()
+ fmt = 'local {lower}_name = os.getenv("LMOD_{upper}_NAME")\n'
+ fmt += 'local {lower}_version = os.getenv("LMOD_{upper}_VERSION")\n' # NOQA: ignore=E501
+ return fmt.format(lower=lower, upper=upper)
+
+ def set_variables_for_service(env, x):
+ upper = x.upper()
+ s = self.provides[x]
+ name, version = os.path.split(self.token_to_path(x, s))
+
+ env.set('LMOD_{upper}_NAME'.format(upper=upper), name)
+ env.set('LMOD_{upper}_VERSION'.format(upper=upper), version)
+
+ def conditional_modulepath_modifications(item):
+ entry = 'if '
+ needed = []
+ for x in self.hierarchy_tokens:
+ if x in missing:
+ needed.append('{x}_name '.format(x=x))
+ entry += 'and '.join(needed) + 'then\n'
+ entry += ' local t = pathJoin("{root}"'.format(
+ root=self.modules_root)
+ for x in item:
+ if x in missing:
+ entry += ', {lower}_name, {lower}_version'.format(
+ lower=x.lower())
+ else:
+ entry += ', "{x}"'.format(
+ x=self.token_to_path(x, available[x]))
+ entry += ')\n'
+ entry += ' prepend_path("MODULEPATH", t)\n'
+ entry += 'end\n\n'
+ return entry
+
+ if 'compiler' not in self.provides:
+ # Retrieve variables
+ entry += '\n'
+ for x in missing:
+ entry += local_variable(x)
+ entry += '\n'
+ # Conditional modifications
+ conditionals = [x
+ for x in self._hierarchy_to_be_provided()
+ if any(t in missing for t in x)]
+ for item in conditionals:
+ entry += conditional_modulepath_modifications(item)
+
+ # Set environment variables for the services we provide
+ env = EnvironmentModifications()
+ for x in self.provides:
+ set_variables_for_service(env, x)
+ for line in self.process_environment_command(env):
+ entry += line
+
+ return entry
+
+ @property
+ def header(self):
+ timestamp = datetime.datetime.now()
+ # Header as in
+ # https://www.tacc.utexas.edu/research-development/tacc-projects/lmod/advanced-user-guide/more-about-writing-module-files
+ header = "-- -*- lua -*-\n"
+ header += '-- Module file created by spack (https://github.com/LLNL/spack) on %s\n' % timestamp # NOQA: ignore=E501
+ header += '--\n'
+ header += '-- %s\n' % self.spec.short_spec
+ header += '--\n'
+
+ # Short description -> whatis()
+ if self.short_description:
+ header += "whatis([[Name : {name}]])\n".format(name=self.spec.name)
+ header += "whatis([[Version : {version}]])\n".format(
+ version=self.spec.version)
+
+ # Long description -> help()
+ if self.long_description:
+ doc = re.sub(r'"', '\"', self.long_description)
+ header += "help([[{documentation}]])\n".format(documentation=doc)
+
+ # Certain things need to be done only if we provide a service
+ if self.provides:
+ # Add family directives
+ header += '\n'
+ for x in self.provides:
+ header += self.family_format.format(family=x)
+ header += '\n'
+ header += '-- MODULEPATH modifications\n'
+ header += '\n'
+ # Modify MODULEPATH
+ header += self.modulepath_modifications()
+ # Set environment variables for services we provide
+ header += '\n'
+ header += '-- END MODULEPATH modifications\n'
+ header += '\n'
+
+ return header
diff --git a/lib/spack/spack/multimethod.py b/lib/spack/spack/multimethod.py
index 5fda9328d6..4e2fb3bdaa 100644
--- a/lib/spack/spack/multimethod.py
+++ b/lib/spack/spack/multimethod.py
@@ -43,15 +43,13 @@ avoids overly complicated rat nests of if statements. Obviously,
depending on the scenario, regular old conditionals might be clearer,
so package authors should use their judgement.
"""
-import sys
import functools
-import collections
from llnl.util.lang import *
import spack.architecture
import spack.error
-from spack.spec import parse_anonymous_spec, Spec
+from spack.spec import parse_anonymous_spec
class SpecMultiMethod(object):
@@ -89,13 +87,13 @@ class SpecMultiMethod(object):
See the docs for decorators below for more details.
"""
+
def __init__(self, default=None):
self.method_list = []
self.default = default
if default:
functools.update_wrapper(self, default)
-
def register(self, spec, method):
"""Register a version of a method for a particular sys_type."""
self.method_list.append((spec, method))
@@ -105,11 +103,19 @@ class SpecMultiMethod(object):
else:
assert(self.__name__ == method.__name__)
-
def __get__(self, obj, objtype):
"""This makes __call__ support instance methods."""
- return functools.partial(self.__call__, obj)
-
+ # self.method_list is a list of (constraint, method) tuples.
+ # Here we are going to assume that we have at least one
+ # element in the list. The first registered function
+ # will be the one 'wrapped'.
+ wrapped_method = self.method_list[0][1]
+ # Call functools.wraps manually to get all the attributes
+ # we need to be disguised as the wrapped_method
+ func = functools.wraps(wrapped_method)(
+ functools.partial(self.__call__, obj)
+ )
+ return func
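The replacement __get__ above relies on the fact that functools.wraps can be applied by hand to any callable, including a functools.partial, so that the returned object is disguised as the wrapped method. A tiny self-contained illustration:

    import functools

    def greet(name):
        """Say hello."""
        return 'hello ' + name

    # functools.wraps copies __name__, __doc__, etc. onto an arbitrary
    # callable; partial objects accept attribute assignment, so this works.
    bound = functools.wraps(greet)(functools.partial(greet, 'world'))

    print(bound())          # hello world
    print(bound.__name__)   # greet
    print(bound.__doc__)    # Say hello.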
def __call__(self, package_self, *args, **kwargs):
"""Find the first method with a spec that matches the
@@ -127,7 +133,6 @@ class SpecMultiMethod(object):
type(package_self), self.__name__, spec,
[m[0] for m in self.method_list])
-
def __str__(self):
return "SpecMultiMethod {\n\tdefault: %s,\n\tspecs: %s\n}" % (
self.default, self.method_list)
@@ -146,12 +151,12 @@ class when(object):
def install(self, prefix):
# Do default install
- @when('=chaos_5_x86_64_ib')
+ @when('arch=chaos_5_x86_64_ib')
def install(self, prefix):
# This will be executed instead of the default install if
- # the package's sys_type() is chaos_5_x86_64_ib.
+ # the package's platform() is chaos_5_x86_64_ib.
- @when('=bgqos_0")
+ @when('arch=bgqos_0')
def install(self, prefix):
# This will be executed if the package's sys_type is bgqos_0
@@ -195,11 +200,13 @@ class when(object):
platform-specific versions. There's not much we can do to get
around this because of the way decorators work.
"""
+
def __init__(self, spec):
pkg = get_calling_module_name()
if spec is True:
spec = pkg
- self.spec = parse_anonymous_spec(spec, pkg) if spec is not False else None
+ self.spec = (parse_anonymous_spec(spec, pkg)
+ if spec is not False else None)
def __call__(self, method):
# Get the first definition of the method in the calling scope
@@ -218,12 +225,14 @@ class when(object):
class MultiMethodError(spack.error.SpackError):
"""Superclass for multimethod dispatch errors"""
+
def __init__(self, message):
super(MultiMethodError, self).__init__(message)
class NoSuchMethodError(spack.error.SpackError):
"""Raised when we can't find a version of a multi-method."""
+
def __init__(self, cls, method_name, spec, possible_specs):
super(NoSuchMethodError, self).__init__(
"Package %s does not support %s called with %s. Options are: %s"
diff --git a/lib/spack/spack/operating_systems/__init__.py b/lib/spack/spack/operating_systems/__init__.py
new file mode 100644
index 0000000000..ed1ec23bca
--- /dev/null
+++ b/lib/spack/spack/operating_systems/__init__.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
diff --git a/lib/spack/spack/operating_systems/cnk.py b/lib/spack/spack/operating_systems/cnk.py
new file mode 100644
index 0000000000..7e02fdd5b2
--- /dev/null
+++ b/lib/spack/spack/operating_systems/cnk.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack.architecture import OperatingSystem
+
+
+class Cnk(OperatingSystem):
+ """ Compute Node Kernel (CNK) is the node level operating system for
+ the IBM Blue Gene series of supercomputers. The compute nodes of the
+ Blue Gene family of supercomputers run CNK, a lightweight kernel that
+ runs on each node and supports one application running for one user
+ on that node."""
+
+ def __init__(self):
+ name = 'cnk'
+ version = '1'
+ super(Cnk, self).__init__(name, version)
+
+ def __str__(self):
+ return self.name
diff --git a/lib/spack/spack/operating_systems/cnl.py b/lib/spack/spack/operating_systems/cnl.py
new file mode 100644
index 0000000000..7acab1cbcb
--- /dev/null
+++ b/lib/spack/spack/operating_systems/cnl.py
@@ -0,0 +1,82 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import re
+
+from spack.architecture import OperatingSystem
+from spack.util.executable import *
+import spack.spec
+from spack.util.multiproc import parmap
+import spack.compilers
+
+
+class Cnl(OperatingSystem):
+ """ Compute Node Linux (CNL) is the operating system used for the Cray XC
+ series supercomputers. It is a very stripped down version of GNU/Linux.
+ Any compilers found through this operating system will be used with
+ modules. If the OS is upgraded (or downgraded), the user must make sure
+ that the name and version are updated to reflect the change.
+ """
+
+ def __init__(self):
+ name = 'CNL'
+ version = '10'
+ super(Cnl, self).__init__(name, version)
+
+ def __str__(self):
+ return self.name
+
+ def find_compilers(self, *paths):
+ types = spack.compilers.all_compiler_types()
+ compiler_lists = parmap(
+ lambda cmp_cls: self.find_compiler(cmp_cls, *paths), types)
+
+ # ensure all the version calls we made are cached in the parent
+ # process, as well. This speeds up Spack a lot.
+ clist = reduce(lambda x, y: x + y, compiler_lists)
+ return clist
+
+ def find_compiler(self, cmp_cls, *paths):
+ compilers = []
+ if cmp_cls.PrgEnv:
+ if not cmp_cls.PrgEnv_compiler:
+ tty.die('Must supply PrgEnv_compiler with PrgEnv')
+
+ modulecmd = which('modulecmd')
+ modulecmd.add_default_arg('python')
+
+ output = modulecmd(
+ 'avail', cmp_cls.PrgEnv_compiler, output=str, error=str)
+ version_regex = r'(%s)/([\d\.]+[\d])' % cmp_cls.PrgEnv_compiler
+ matches = re.findall(version_regex, output)
+ for name, version in matches:
+ v = version
+ comp = cmp_cls(
+ spack.spec.CompilerSpec(name + '@' + v),
+ self, "any",
+ ['cc', 'CC', 'ftn'], [cmp_cls.PrgEnv, name + '/' + v])
+
+ compilers.append(comp)
+
+ return compilers
diff --git a/lib/spack/spack/operating_systems/linux_distro.py b/lib/spack/spack/operating_systems/linux_distro.py
new file mode 100644
index 0000000000..fb2797fd36
--- /dev/null
+++ b/lib/spack/spack/operating_systems/linux_distro.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import re
+from spack.architecture import OperatingSystem
+
+
+class LinuxDistro(OperatingSystem):
+ """ This class will represent the autodetected operating system
+ for a Linux system. Since there are many different flavors of
+ Linux, this class will attempt to encompass them all through
+ autodetection, using the linux_distribution() function from the
+ bundled external.distro module.
+ """
+
+ def __init__(self):
+ try:
+ # This will throw an error if imported on a non-Linux platform.
+ from external.distro import linux_distribution
+ distname, version, _ = linux_distribution(
+ full_distribution_name=False)
+ distname, version = str(distname), str(version)
+ except ImportError:
+ distname, version = 'unknown', ''
+
+ # Grabs major version from tuple on redhat; on other platforms
+ # grab the first legal identifier in the version field. On
+ # debian you get things like 'wheezy/sid'; sid means unstable.
+ # We just record 'wheezy' and don't get quite so detailed.
+ version = re.split(r'[^\w-]', version)[0]
+
+ super(LinuxDistro, self).__init__(distname, version)
diff --git a/lib/spack/spack/operating_systems/mac_os.py b/lib/spack/spack/operating_systems/mac_os.py
new file mode 100644
index 0000000000..a75ce8a946
--- /dev/null
+++ b/lib/spack/spack/operating_systems/mac_os.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import platform as py_platform
+from spack.architecture import OperatingSystem
+
+
+class MacOs(OperatingSystem):
+ """This class represents the macOS operating system. This will be
+ auto detected using the python platform.mac_ver. The macOS
+ platform will be represented using the major version operating
+ system name, i.e el capitan, yosemite...etc.
+ """
+
+ def __init__(self):
+ """ Autodetects the mac version from a dictionary. Goes back as
+ far as 10.6 snowleopard. If the user has an older mac then
+ the version will just be a generic mac_os.
+ """
+ mac_releases = {'10.6': "snowleopard",
+ "10.7": "lion",
+ "10.8": "mountainlion",
+ "10.9": "mavericks",
+ "10.10": "yosemite",
+ "10.11": "elcapitan",
+ "10.12": "sierra"}
+
+ mac_ver = '.'.join(py_platform.mac_ver()[0].split('.')[:2])
+ name = mac_releases.get(mac_ver, "macos")
+ super(MacOs, self).__init__(name, mac_ver)
+
+ def __str__(self):
+ return self.name
diff --git a/lib/spack/spack/package.py b/lib/spack/spack/package.py
index 2e7d8a7709..f9bc1fafbc 100644
--- a/lib/spack/spack/package.py
+++ b/lib/spack/spack/package.py
@@ -33,14 +33,21 @@ Homebrew makes it very easy to create packages. For a complete
rundown on spack and how it differs from homebrew, look at the
README.
"""
+import contextlib
+import copy
+import functools
+import inspect
import os
import re
+import sys
import textwrap
import time
+from StringIO import StringIO
+import llnl.util.lock
import llnl.util.tty as tty
import spack
-import spack.build_environment
+import spack.store
import spack.compilers
import spack.directives
import spack.error
@@ -50,22 +57,183 @@ import spack.mirror
import spack.repository
import spack.url
import spack.util.web
-from StringIO import StringIO
from llnl.util.filesystem import *
from llnl.util.lang import *
from llnl.util.link_tree import LinkTree
from llnl.util.tty.log import log_output
+from spack import directory_layout
from spack.stage import Stage, ResourceStage, StageComposite
-from spack.util.compression import allowed_archive
+from spack.util.crypto import bit_length
from spack.util.environment import dump_environment
-from spack.util.executable import ProcessError
from spack.version import *
-from urlparse import urlparse
+
"""Allowed URL schemes for spack packages."""
_ALLOWED_URL_SCHEMES = ["http", "https", "ftp", "file", "git"]
-class Package(object):
+class InstallPhase(object):
+ """Manages a single phase of the installation
+
+ This descriptor stores at creation time the name of the method it should
+ search for execution. The method is retrieved at __get__ time, so that
+ it can be overridden by subclasses of whatever class declared the phases.
+
+ It also provides hooks to execute prerequisite and sanity checks.
+ """
+
+ def __init__(self, name):
+ self.name = name
+ self.preconditions = []
+ self.sanity_checks = []
+
+ def __get__(self, instance, owner):
+ # The caller is a class that is trying to customize
+ # my behavior by adding something
+ if instance is None:
+ return self
+ # If an instance is present, the caller wants to execute the
+ # install phase, so return a properly set up wrapper
+ phase = getattr(instance, self.name)
+
+ @functools.wraps(phase)
+ def phase_wrapper(spec, prefix):
+ # Check instance attributes at the beginning of a phase
+ self._on_phase_start(instance)
+ # Execute phase pre-conditions,
+ # and give them the chance to fail
+ for check in self.preconditions:
+ # Do something sensible at some point
+ check(instance)
+ phase(spec, prefix)
+ # Execute phase sanity_checks,
+ # and give them the chance to fail
+ for check in self.sanity_checks:
+ check(instance)
+ # Check instance attributes at the end of a phase
+ self._on_phase_exit(instance)
+ return phase_wrapper
+
+ def _on_phase_start(self, instance):
+ pass
+
+ def _on_phase_exit(self, instance):
+ # If a phase has a matching last_phase attribute,
+ # stop the installation process by raising a StopIteration
+ if getattr(instance, 'last_phase', None) == self.name:
+ raise StopIteration('Stopping at \'{0}\' phase'.format(self.name))
+
+ def copy(self):
+ try:
+ return copy.deepcopy(self)
+ except TypeError:
+ # This bug-fix was not back-ported in Python 2.6
+ # http://bugs.python.org/issue1515
+ other = InstallPhase(self.name)
+ other.preconditions.extend(self.preconditions)
+ other.sanity_checks.extend(self.sanity_checks)
+ return other
+
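A toy sketch of the descriptor pattern InstallPhase implements: only the method name is stored, and the actual phase body is looked up on the instance at __get__ time, so a subclass override is still wrapped. The class and attribute names below are hypothetical:

    import functools

    class PhaseSketch(object):
        """Stores only a method name; resolves it on the instance later."""

        def __init__(self, name):
            self.name = name

        def __get__(self, instance, owner):
            if instance is None:
                return self
            phase = getattr(instance, self.name)

            @functools.wraps(phase)
            def wrapper(*args, **kwargs):
                # Hooks (preconditions, sanity checks) would run here.
                print('-> entering phase {0}'.format(self.name))
                return phase(*args, **kwargs)
            return wrapper

    class ToyPackage(object):
        _build = PhaseSketch('build')

        def build(self):
            print('building')

    class QuietToyPackage(ToyPackage):
        def build(self):  # overridden body, still wrapped by the descriptor
            print('building quietly')

    ToyPackage()._build()       # -> entering phase build / building
    QuietToyPackage()._build()  # -> entering phase build / building quietly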
+
+class PackageMeta(spack.directives.DirectiveMetaMixin):
+ """Conveniently transforms attributes to permit extensible phases
+
+ Iterates over the attribute 'phases' and creates / updates private
+ InstallPhase attributes in the class that is being initialized
+ """
+ phase_fmt = '_InstallPhase_{0}'
+
+ _InstallPhase_sanity_checks = {}
+ _InstallPhase_preconditions = {}
+
+ def __new__(meta, name, bases, attr_dict):
+ # Check if phases is in attr dict, then set
+ # install phases wrappers
+ if 'phases' in attr_dict:
+ _InstallPhase_phases = [PackageMeta.phase_fmt.format(x) for x in attr_dict['phases']] # NOQA: ignore=E501
+ for phase_name, callback_name in zip(_InstallPhase_phases, attr_dict['phases']): # NOQA: ignore=E501
+ attr_dict[phase_name] = InstallPhase(callback_name)
+ attr_dict['_InstallPhase_phases'] = _InstallPhase_phases
+
+ def _append_checks(check_name):
+ # Name of the attribute whose existence I am going to check
+ attr_name = PackageMeta.phase_fmt.format(check_name)
+ checks = getattr(meta, attr_name)
+ if checks:
+ for phase_name, funcs in checks.items():
+ try:
+ # Search for the phase in the attribute dictionary
+ phase = attr_dict[
+ PackageMeta.phase_fmt.format(phase_name)]
+ except KeyError:
+ # If it is not there it's in the bases
+ # and we added a check. We need to copy
+ # and extend
+ for base in bases:
+ phase = getattr(
+ base,
+ PackageMeta.phase_fmt.format(phase_name),
+ None
+ )
+ attr_dict[PackageMeta.phase_fmt.format(
+ phase_name)] = phase.copy()
+ phase = attr_dict[
+ PackageMeta.phase_fmt.format(phase_name)]
+ getattr(phase, check_name).extend(funcs)
+ # Clear the attribute for the next class
+ setattr(meta, attr_name, {})
+
+ @classmethod
+ def _register_checks(cls, check_type, *args):
+ def _register_sanity_checks(func):
+ attr_name = PackageMeta.phase_fmt.format(check_type)
+ check_list = getattr(meta, attr_name)
+ for item in args:
+ checks = check_list.setdefault(item, [])
+ checks.append(func)
+ setattr(meta, attr_name, check_list)
+ return func
+ return _register_sanity_checks
+
+ @staticmethod
+ def on_package_attributes(**attrs):
+ def _execute_under_condition(func):
+ @functools.wraps(func)
+ def _wrapper(instance):
+ # If all the attributes have the value we require, then
+ # execute
+ if all([getattr(instance, key, None) == value for key, value in attrs.items()]): # NOQA: ignore=E501
+ func(instance)
+ return _wrapper
+ return _execute_under_condition
+
+ @classmethod
+ def precondition(cls, *args):
+ return cls._register_checks('preconditions', *args)
+
+ @classmethod
+ def sanity_check(cls, *args):
+ return cls._register_checks('sanity_checks', *args)
+
+ if all([not hasattr(x, '_register_checks') for x in bases]):
+ attr_dict['_register_checks'] = _register_checks
+
+ if all([not hasattr(x, 'sanity_check') for x in bases]):
+ attr_dict['sanity_check'] = sanity_check
+
+ if all([not hasattr(x, 'precondition') for x in bases]):
+ attr_dict['precondition'] = precondition
+
+ if all([not hasattr(x, 'on_package_attributes') for x in bases]):
+ attr_dict['on_package_attributes'] = on_package_attributes
+
+ # Preconditions
+ _append_checks('preconditions')
+ # Sanity checks
+ _append_checks('sanity_checks')
+ return super(PackageMeta, meta).__new__(meta, name, bases, attr_dict)
+
+
+class PackageBase(object):
"""This is the superclass for all spack packages.
***The Package class***
@@ -78,9 +246,9 @@ class Package(object):
with the package itself. Packages are written in pure python.
Packages are all submodules of spack.packages. If spack is installed
- in $prefix, all of its python files are in $prefix/lib/spack. Most
- of them are in the spack module, so all the packages live in
- $prefix/lib/spack/spack/packages.
+ in ``$prefix``, all of its python files are in ``$prefix/lib/spack``.
+ Most of them are in the spack module, so all the packages live in
+ ``$prefix/lib/spack/spack/packages``.
All you have to do to create a package is make a new subclass of Package
in this directory. Spack automatically scans the python files there
@@ -89,7 +257,7 @@ class Package(object):
**An example package**
Let's look at the cmake package to start with. This package lives in
- $prefix/lib/spack/spack/packages/cmake.py:
+ ``$prefix/var/spack/repos/builtin/packages/cmake/package.py``:
.. code-block:: python
@@ -112,19 +280,21 @@ class Package(object):
1. The module name, ``cmake``.
* User will refers to this name, e.g. 'spack install cmake'.
- * Corresponds to the name of the file, 'cmake.py', and it can
- include ``_``, ``-``, and numbers (it can even start with a
+ * It can include ``_``, ``-``, and numbers (it can even start with a
number).
2. The class name, "Cmake". This is formed by converting `-` or
``_`` in the module name to camel case. If the name starts with
a number, we prefix the class name with ``_``. Examples:
- Module Name Class Name
- foo_bar FooBar
- docbook-xml DocbookXml
- FooBar Foobar
- 3proxy _3proxy
+ =========== ==========
+ Module Name Class Name
+ =========== ==========
+ foo_bar FooBar
+ docbook-xml DocbookXml
+ FooBar Foobar
+ 3proxy _3proxy
+ =========== ==========
The class name is what spack looks for when it loads a package module.
@@ -133,28 +303,30 @@ class Package(object):
Aside from proper naming, here is the bare minimum set of things you
need when you make a package:
- homepage
- informational URL, so that users know what they're
- installing.
+ homepage:
+ informational URL, so that users know what they're
+ installing.
- url
- URL of the source archive that spack will fetch.
+ url or url_for_version(self, version):
+ If url, then the URL of the source archive that spack will fetch.
+ If url_for_version(), then a method returning the URL required
+ to fetch a particular version.
- install()
- This function tells spack how to build and install the
- software it downloaded.
+ install():
+ This function tells spack how to build and install the
+ software it downloaded.
**Optional Attributes**
You can also optionally add these attributes, if needed:
- list_url
+ list_url:
Webpage to scrape for available version strings. Default is the
directory containing the tarball; use this if the default isn't
correct so that invoking 'spack versions' will work for this
package.
- url_version(self, version)
+ url_version(self, version):
When spack downloads packages at particular versions, it just
converts version to string with str(version). Override this if
your package needs special version formatting in its URL. boost
@@ -171,11 +343,12 @@ class Package(object):
**spack create**
- Most software comes in nicely packaged tarballs, like this one:
- http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
+ Most software comes in nicely packaged tarballs, like this one
+
+ http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
Taking a page from homebrew, spack deduces pretty much everything it
- needs to know from the URL above. If you simply type this:
+ needs to know from the URL above. If you simply type this::
spack create http://www.cmake.org/files/v2.8/cmake-2.8.10.2.tar.gz
@@ -210,13 +383,13 @@ class Package(object):
you can just run configure or cmake without any additional arguments and
it will find the dependencies automatically.
-
**The Install Function**
The install function is designed so that someone not too terribly familiar
with Python could write a package installer. For example, we put a number
of commands in install scope that you can use almost like shell commands.
- These include make, configure, cmake, rm, rmtree, mkdir, mkdirp, and others.
+ These include make, configure, cmake, rm, rmtree, mkdir, mkdirp, and
+ others.
You can see above in the cmake script that these commands are used to run
configure and make almost like they're used on the command line. The
@@ -231,17 +404,16 @@ class Package(object):
pollute other namespaces, and it allows you to more easily implement an
install function.
- For a full list of commands and variables available in module scope, see the
- add_commands_to_module() function in this class. This is where most of
- them are created and set on the module.
-
+ For a full list of commands and variables available in module scope, see
+ the add_commands_to_module() function in this class. This is where most
+ of them are created and set on the module.
**Parallel Builds**
By default, Spack will run make in parallel when you run make() in your
install function. Spack figures out how many cores are available on
- your system and runs make with -j<cores>. If you do not want this behavior,
- you can explicitly mark a package not to use parallel make:
+ your system and runs make with -j<cores>. If you do not want this
+ behavior, you can explicitly mark a package not to use parallel make:
.. code-block:: python
@@ -250,15 +422,16 @@ class Package(object):
parallel = False
...
- This changes thd default behavior so that make is sequential. If you still
- want to build some parts in parallel, you can do this in your install function:
+ This changes the default behavior so that make is sequential. If you still
+ want to build some parts in parallel, you can do this in your install
+ function:
.. code-block:: python
make(parallel=True)
- Likewise, if you do not supply parallel = True in your Package, you can keep
- the default parallel behavior and run make like this when you want a
+ Likewise, if you do not supply parallel = True in your Package, you can
+ keep the default parallel behavior and run make like this when you want a
sequential build:
.. code-block:: python
@@ -289,39 +462,60 @@ class Package(object):
p.do_restage() # removes the build directory and
# re-expands the archive.
- The convention used here is that a do_* function is intended to be called
- internally by Spack commands (in spack.cmd). These aren't for package
- writers to override, and doing so may break the functionality of the Package
- class.
+ The convention used here is that a ``do_*`` function is intended to be
+ called internally by Spack commands (in spack.cmd). These aren't for
+ package writers to override, and doing so may break the functionality
+ of the Package class.
Package creators override functions like install() (all of them do this),
clean() (some of them do this), and others to provide custom behavior.
-
"""
+ __metaclass__ = PackageMeta
#
# These are default values for instance variables.
#
"""By default we build in parallel. Subclasses can override this."""
parallel = True
+
"""# jobs to use for parallel make. If set, overrides default of ncpus."""
make_jobs = None
+
+ """By default do not run tests within package's install()"""
+ run_tests = False
+
+ # FIXME: this is a bad object-oriented design, should be moved to Clang.
+ """By default do not setup mockup XCode on macOS with Clang"""
+ use_xcode = False
+
"""Most packages are NOT extendable. Set to True if you want extensions."""
extendable = False
+
+ """When True, add RPATHs for the entire DAG. When False, add RPATHs only
+ for immediate dependencies."""
+ transitive_rpaths = True
+
"""List of prefix-relative file paths (or a single path). If these do
not exist after install, or if they exist but are not files,
sanity checks fail.
"""
sanity_check_is_file = []
+
"""List of prefix-relative directory paths (or a single path). If
these do not exist after install, or if they exist but are not
directories, sanity checks will fail.
"""
sanity_check_is_dir = []
+ """Per-process lock objects for each install prefix."""
+ prefix_locks = {}
+
def __init__(self, spec):
# this determines how the package should be built.
self.spec = spec
+ # Lock on the prefix shared resource. Will be set in prefix property
+ self._prefix_lock = None
+
# Name of package is the name of its module, without the
# containing module names.
self.name = self.module.__name__
@@ -331,9 +525,6 @@ class Package(object):
# Allow custom staging paths for packages
self.path = None
- # Sanity check attributes required by Spack directives.
- spack.directives.ensure_dicts(type(self))
-
# Check versions in the versions dict.
for v in self.versions:
assert (isinstance(v, Version))
@@ -397,14 +588,41 @@ class Package(object):
if self.is_extension:
spack.repo.get(self.extendee_spec)._check_extendable()
+ self.extra_args = {}
+
+ def possible_dependencies(self, visited=None):
+ """Return set of possible transitive dependencies of this package."""
+ if visited is None:
+ visited = set()
+
+ visited.add(self.name)
+ for name in self.dependencies:
+ if name not in visited and not spack.spec.Spec(name).virtual:
+ pkg = spack.repo.get(name)
+ for name in pkg.possible_dependencies(visited):
+ visited.add(name)
+
+ return visited
+
+ @property
+ def package_dir(self):
+ """Return the directory where the package.py file lives."""
+ return os.path.dirname(self.module.__file__)
+
+ @property
+ def global_license_dir(self):
+ """Returns the directory where global license files for all
+ packages are stored."""
+ spack_root = ancestor(__file__, 4)
+ return join_path(spack_root, 'etc', 'spack', 'licenses')
+
@property
def global_license_file(self):
- """Returns the path where a global license file should be stored."""
+ """Returns the path where a global license file for this
+ particular package should be stored."""
if not self.license_files:
return
- spack_root = ancestor(__file__, 4)
- global_license_dir = join_path(spack_root, 'etc', 'spack', 'licenses')
- return join_path(global_license_dir, self.name,
+ return join_path(self.global_license_dir, self.name,
os.path.basename(self.license_files[0]))
@property
@@ -443,8 +661,13 @@ class Package(object):
# TODO: move this out of here and into some URL extrapolation module?
def url_for_version(self, version):
- """
- Returns a URL that you can download a new version of this package from.
+ """Returns a URL from which the specified version of this package
+ may be downloaded.
+
+ version: class Version
+ The version for which a URL is sought.
+
+ See Class Version (version.py)
"""
if not isinstance(version, Version):
version = Version(version)
@@ -464,7 +687,8 @@ class Package(object):
def _make_resource_stage(self, root_stage, fetcher, resource):
resource_stage_folder = self._resource_stage(resource)
- resource_mirror = join_path(self.name, os.path.basename(fetcher.url))
+ resource_mirror = spack.mirror.mirror_archive_path(
+ self.spec, fetcher, resource.name)
stage = ResourceStage(resource.fetcher,
root=root_stage,
resource=resource,
@@ -479,8 +703,13 @@ class Package(object):
# Construct a path where the stage should build..
s = self.spec
stage_name = "%s-%s-%s" % (s.name, s.version, s.dag_hash())
- # Build the composite stage
- stage = Stage(fetcher, mirror_path=mp, name=stage_name, path=self.path)
+
+ def download_search():
+ dynamic_fetcher = fs.from_list_url(self)
+ return [dynamic_fetcher] if dynamic_fetcher else []
+
+ stage = Stage(fetcher, mirror_path=mp, name=stage_name, path=self.path,
+ search_fn=download_search)
return stage
def _make_stage(self):
@@ -545,6 +774,11 @@ class Package(object):
def fetcher(self, f):
self._fetcher = f
+ def dependencies_of_type(self, *deptypes):
+ """Get subset of the dependencies with certain types."""
+ return dict((name, conds) for name, conds in self.dependencies.items()
+ if any(d in self.dependency_types[name] for d in deptypes))
+
@property
def extendee_spec(self):
"""
@@ -557,7 +791,7 @@ class Package(object):
name = next(iter(self.extendees))
# If the extendee is in the spec's deps already, return that.
- for dep in self.spec.traverse():
+ for dep in self.spec.traverse(deptypes=('link', 'run')):
if name == dep.name:
return dep
@@ -604,71 +838,41 @@ class Package(object):
if not self.is_extension:
raise ValueError(
"is_extension called on package that is not an extension.")
- exts = spack.install_layout.extension_map(self.extendee_spec)
+ exts = spack.store.layout.extension_map(self.extendee_spec)
return (self.name in exts) and (exts[self.name] == self.spec)
- def preorder_traversal(self, visited=None, **kwargs):
- """This does a preorder traversal of the package's dependence DAG."""
- virtual = kwargs.get("virtual", False)
-
- if visited is None:
- visited = set()
-
- if self.name in visited:
- return
- visited.add(self.name)
-
- if not virtual:
- yield self
-
- for name in sorted(self.dependencies.keys()):
- spec = self.dependencies[name]
-
- # currently, we do not descend into virtual dependencies, as this
- # makes doing a sensible traversal much harder. We just assume
- # that ANY of the virtual deps will work, which might not be true
- # (due to conflicts or unsatisfiable specs). For now this is ok
- # but we might want to reinvestigate if we start using a lot of
- # complicated virtual dependencies
- # TODO: reinvestigate this.
- if spec.virtual:
- if virtual:
- yield spec
- continue
-
- for pkg in spack.repo.get(name).preorder_traversal(visited,
- **kwargs):
- yield pkg
-
def provides(self, vpkg_name):
"""
True if this package provides a virtual package with the specified name
"""
return any(s.name == vpkg_name for s in self.provided)
- def virtual_dependencies(self, visited=None):
- for spec in sorted(set(self.preorder_traversal(virtual=True))):
- yield spec
-
@property
def installed(self):
return os.path.isdir(self.prefix)
@property
- def installed_dependents(self):
- """Return a list of the specs of all installed packages that depend
- on this one.
+ def prefix_lock(self):
+ """Prefix lock is a byte range lock on the nth byte of a file.
- TODO: move this method to database.py?
+ The lock file is ``spack.store.db.prefix_lock_path`` -- the DB
+ tells us what to call it and it lives alongside the install DB.
+
+ n is the sys.maxsize-bit prefix of the DAG hash. This makes the
+ likelihood of collision very low AND it gives us
+ readers-writer lock semantics with just a single lockfile, so no
+ cleanup required.
"""
- dependents = []
- for spec in spack.installed_db.query():
- if self.name == spec.name:
- continue
- for dep in spec.traverse():
- if self.spec == dep:
- dependents.append(spec)
- return dependents
+ if self._prefix_lock is None:
+ prefix = self.spec.prefix
+ if prefix not in Package.prefix_locks:
+ Package.prefix_locks[prefix] = llnl.util.lock.Lock(
+ spack.store.db.prefix_lock_path,
+ self.spec.dag_hash_bit_prefix(bit_length(sys.maxsize)), 1)
+
+ self._prefix_lock = Package.prefix_locks[prefix]
+
+ return self._prefix_lock
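A standalone sketch of the idea described in the docstring above: reduce a DAG hash to a small integer byte offset so that many prefixes can share one lock file. The helpers below are illustrative only, not Spack's bit_length() or dag_hash_bit_prefix():

    import hashlib
    import sys

    def lock_offset(dag_hash_hex, bits):
        # Interpret enough leading hex digits of the hash, then drop the
        # excess low-order bits so exactly `bits` bits pick the byte offset.
        ndigits = (bits + 3) // 4
        value = int(dag_hash_hex[:ndigits], 16)
        return value >> (4 * ndigits - bits)

    fake_hash = hashlib.sha1(b'zlib@1.2.8%gcc@6.2.0').hexdigest()
    offset = lock_offset(fake_hash, sys.maxsize.bit_length())
    print(offset)  # byte within the shared lock file to lock for this prefix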
@property
def prefix(self):
@@ -676,11 +880,20 @@ class Package(object):
return self.spec.prefix
@property
+ def architecture(self):
+ """Get the spack.architecture.Arch object that represents the
+ environment in which this package will be built."""
+ if not self.spec.concrete:
+ raise ValueError("Can only get the arch for concrete package.")
+ return spack.architecture.arch_for_spec(self.spec.architecture)
+
+ @property
def compiler(self):
"""Get the spack.compiler.Compiler object used to build this package"""
if not self.spec.concrete:
raise ValueError("Can only get a compiler for a concrete package.")
- return spack.compilers.compiler_for_spec(self.spec.compiler)
+ return spack.compilers.compiler_for_spec(self.spec.compiler,
+ self.spec.architecture)
def url_version(self, version):
"""
@@ -698,7 +911,7 @@ class Package(object):
Removes the prefix for a package along with any empty parent
directories
"""
- spack.install_layout.remove_install_directory(self.spec)
+ spack.store.layout.remove_install_directory(self.spec)
def do_fetch(self, mirror_only=False):
"""
@@ -715,17 +928,17 @@ class Package(object):
# Ask the user whether to skip the checksum if we're
# interactive, but just fail if non-interactive.
- checksum_msg = "Add a checksum or use --no-checksum to skip this check." # NOQA: ignore=E501
+ ck_msg = "Add a checksum or use --no-checksum to skip this check."
ignore_checksum = False
if sys.stdout.isatty():
ignore_checksum = tty.get_yes_or_no(" Fetch anyway?",
default=False)
if ignore_checksum:
- tty.msg("Fetching with no checksum.", checksum_msg)
+ tty.msg("Fetching with no checksum.", ck_msg)
if not ignore_checksum:
raise FetchError("Will not fetch %s" %
- self.spec.format('$_$@'), checksum_msg)
+ self.spec.format('$_$@'), ck_msg)
self.stage.fetch(mirror_only)
@@ -734,6 +947,8 @@ class Package(object):
if spack.do_checksum and self.version in self.versions:
self.stage.check()
+ self.stage.cache_local()
+
def do_stage(self, mirror_only=False):
"""Unpacks the fetched tarball, then changes into the expanded tarball
directory."""
@@ -827,12 +1042,35 @@ class Package(object):
return namespace
def do_fake_install(self):
- """Make a fake install directory contaiing a 'fake' file in bin."""
+ """Make a fake install directory containing a 'fake' file in bin."""
+ # FIXME : Make this part of the 'install' behavior ?
mkdirp(self.prefix.bin)
touch(join_path(self.prefix.bin, 'fake'))
+ mkdirp(self.prefix.include)
mkdirp(self.prefix.lib)
mkdirp(self.prefix.man1)
+ def _if_make_target_execute(self, target):
+ try:
+ # Check if we have a makefile
+ file = [x for x in ('Makefile', 'makefile') if os.path.exists(x)]
+ file = file.pop()
+ except IndexError:
+ tty.msg('No Makefile found in the build directory')
+ return
+
+ # Check if 'target' is in the makefile
+ regex = re.compile('^' + target + ':')
+ with open(file, 'r') as f:
+ matches = [line for line in f.readlines() if regex.match(line)]
+
+ if not matches:
+ tty.msg('Target \'' + target + ':\' not found in Makefile')
+ return
+
+ # Execute target
+ inspect.getmodule(self).make(target)
+
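A tiny standalone sketch of the "is this target defined?" check performed by _if_make_target_execute(), without the call into the build module (file names and the commented usage are hypothetical):

    import os
    import re

    def makefile_has_target(directory, target):
        # Look for a 'Makefile' or 'makefile' and grep it for 'target:'.
        for candidate in ('Makefile', 'makefile'):
            path = os.path.join(directory, candidate)
            if os.path.exists(path):
                rule = re.compile('^' + re.escape(target) + ':')
                with open(path) as f:
                    return any(rule.match(line) for line in f)
        return False

    # e.g. only run `make check` when a check: rule actually exists
    # if makefile_has_target('.', 'check'):
    #     subprocess.check_call(['make', 'check'])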
def _get_needed_resources(self):
resources = []
# Select the resources that are needed for this build
@@ -850,34 +1088,59 @@ class Package(object):
resource_stage_folder = '-'.join(pieces)
return resource_stage_folder
+ @contextlib.contextmanager
+ def _prefix_read_lock(self):
+ try:
+ self.prefix_lock.acquire_read(60)
+ yield self
+ finally:
+ self.prefix_lock.release_read()
+
+ @contextlib.contextmanager
+ def _prefix_write_lock(self):
+ try:
+ self.prefix_lock.acquire_write(60)
+ yield self
+ finally:
+ self.prefix_lock.release_write()
+
def do_install(self,
keep_prefix=False,
keep_stage=False,
- ignore_deps=False,
+ install_deps=True,
skip_patch=False,
verbose=False,
make_jobs=None,
+ run_tests=False,
fake=False,
- explicit=False):
+ explicit=False,
+ dirty=None,
+ **kwargs):
"""Called by commands to install a package and its dependencies.
Package implementations should override install() to describe
their build process.
- Args:
- keep_prefix -- Keep install prefix on failure. By default, destroys it.
- keep_stage -- By default, stage is destroyed only if there are no
- exceptions during build. Set to True to keep the stage
- even with exceptions.
- ignore_deps -- Don't install dependencies before installing this
- package
- fake -- Don't really build -- install fake stub files instead.
- skip_patch -- Skip patch stage of build if True.
- verbose -- Display verbose build output (by default, suppresses it)
- make_jobs -- Number of make jobs to use for install. Default is ncpus
+ :param keep_prefix: Keep install prefix on failure. By default, \
+ destroys it.
+ :param keep_stage: By default, stage is destroyed only if there are \
+ no exceptions during build. Set to True to keep the stage
+ even with exceptions.
+ :param install_deps: Install dependencies before installing this \
+ package
+ :param fake: Don't really build; install fake stub files instead.
+ :param skip_patch: Skip patch stage of build if True.
+ :param verbose: Display verbose build output (by default, suppresses \
+ it)
+ :param dirty: Don't clean the build environment before installing.
+ :param make_jobs: Number of make jobs to use for install. Default is \
+ ncpus
+ :param force: Install again, even if already installed.
+ :param run_tests: Run tests within the package's install()
"""
if not self.spec.concrete:
- raise ValueError("Can only install concrete packages.")
+ raise ValueError("Can only install concrete packages: %s."
+ % self.spec.name)
# No installation needed if package is external
if self.spec.external:
@@ -886,35 +1149,66 @@ class Package(object):
return
# Ensure package is not already installed
- if spack.install_layout.check_installed(self.spec):
- tty.msg("%s is already installed in %s" % (self.name, self.prefix))
- rec = spack.installed_db.get_record(self.spec)
- if (not rec.explicit) and explicit:
- with spack.installed_db.write_transaction():
- rec = spack.installed_db.get_record(self.spec)
- rec.explicit = True
- return
+ layout = spack.store.layout
+ with self._prefix_read_lock():
+ if layout.check_installed(self.spec):
+ tty.msg(
+ "%s is already installed in %s" % (self.name, self.prefix))
+ rec = spack.store.db.get_record(self.spec)
+ if (not rec.explicit) and explicit:
+ with spack.store.db.write_transaction():
+ rec = spack.store.db.get_record(self.spec)
+ rec.explicit = True
+ return
+
+ # Dirty argument takes precedence over dirty config setting.
+ if dirty is None:
+ dirty = spack.dirty
+
+ self._do_install_pop_kwargs(kwargs)
tty.msg("Installing %s" % self.name)
# First, install dependencies recursively.
- if not ignore_deps:
- self.do_install_dependencies(keep_prefix=keep_prefix,
- keep_stage=keep_stage,
- ignore_deps=ignore_deps,
- fake=fake,
- skip_patch=skip_patch,
- verbose=verbose,
- make_jobs=make_jobs)
+ if install_deps:
+ for dep in self.spec.dependencies():
+ dep.package.do_install(
+ keep_prefix=keep_prefix,
+ keep_stage=keep_stage,
+ install_deps=install_deps,
+ fake=fake,
+ skip_patch=skip_patch,
+ verbose=verbose,
+ make_jobs=make_jobs,
+ run_tests=run_tests,
+ dirty=dirty,
+ **kwargs
+ )
+
+ # Set run_tests flag before starting build.
+ self.run_tests = run_tests
# Set parallelism before starting build.
self.make_jobs = make_jobs
# Then install the package itself.
- def build_process():
+ def build_process(input_stream):
"""Forked for each build. Has its own process and python
module space set up by build_environment.fork()."""
+ # We are in the child process. This means that our sys.stdin is
+ # equal to open(os.devnull). Python did this to prevent our process
+ # and the parent process from possible simultaneous reading from
+ # the original standard input. But we assume that the parent
+ # process is not going to read from it till we are done here,
+ # otherwise it should not have passed us the copy of the stream.
+ # Thus, we are free to work with the copy (input_stream)
+ # however we want. For example, we might want to call functions
+ # (e.g. raw_input()) that implicitly read from whatever stream is
+ # assigned to sys.stdin. Since we want them to work with the
+ # original input stream, we are making the following assignment:
+ sys.stdin = input_stream
+
start_time = time.time()
if not fake:
if not skip_patch:
@@ -922,14 +1216,16 @@ class Package(object):
else:
self.do_stage()
- tty.msg("Building %s" % self.name)
+ tty.msg(
+ 'Building {0} [{1}]'.format(self.name, self.build_system_class)
+ )
self.stage.keep = keep_stage
- with self.stage:
+
+ with contextlib.nested(self.stage, self._prefix_write_lock()):
# Run the pre-install hook in the child process after
# the directory is created.
spack.hooks.pre_install(self)
-
if fake:
self.do_fake_install()
else:
@@ -939,37 +1235,34 @@ class Package(object):
# Save the build environment in a file before building.
env_path = join_path(os.getcwd(), 'spack-build.env')
- try:
- # Redirect I/O to a build log (and optionally to
- # the terminal)
- log_path = join_path(os.getcwd(), 'spack-build.out')
- log_file = open(log_path, 'w')
- with log_output(log_file, verbose, sys.stdout.isatty(),
- True):
- dump_environment(env_path)
- self.install(self.spec, self.prefix)
-
- except ProcessError as e:
- # Annotate ProcessErrors with the location of
- # the build log
- e.build_log = log_path
- raise e
-
- # Ensure that something was actually installed.
- self.sanity_check_prefix()
-
- # Copy provenance into the install directory on success
- log_install_path = spack.install_layout.build_log_path(
- self.spec)
- env_install_path = spack.install_layout.build_env_path(
- self.spec)
- packages_dir = spack.install_layout.build_packages_path(
- self.spec)
-
- install(log_path, log_install_path)
- install(env_path, env_install_path)
- dump_packages(self.spec, packages_dir)
-
+ # Redirect I/O to a build log (and optionally to
+ # the terminal)
+ log_path = join_path(os.getcwd(), 'spack-build.out')
+
+ # FIXME : refactor this assignment
+ self.log_path = log_path
+ self.env_path = env_path
+ dump_environment(env_path)
+
+ # Spawn a daemon that reads from a pipe and redirects
+ # everything to log_path
+ redirection_context = log_output(
+ log_path,
+ echo=verbose,
+ force_color=sys.stdout.isatty(),
+ debug=True,
+ input_stream=input_stream
+ )
+ with redirection_context as log_redirection:
+ for phase_name, phase in zip(self.phases, self._InstallPhase_phases): # NOQA: ignore=E501
+ tty.msg(
+ 'Executing phase : \'{0}\''.format(phase_name) # NOQA: ignore=E501
+ )
+ # Redirect stdout and stderr to daemon pipe
+ with log_redirection:
+ getattr(self, phase)(
+ self.spec, self.prefix)
+ self.log()
# Run post install hooks before build stage is removed.
spack.hooks.post_install(self)
@@ -985,23 +1278,67 @@ class Package(object):
try:
# Create the install prefix and fork the build process.
- spack.install_layout.create_install_directory(self.spec)
- spack.build_environment.fork(self, build_process)
- except:
- # remove the install prefix if anything went wrong during install.
+ spack.store.layout.create_install_directory(self.spec)
+ # Fork a child to do the actual installation
+ spack.build_environment.fork(self, build_process, dirty=dirty)
+ # If we installed then we should keep the prefix
+ keep_prefix = True if self.last_phase is None else keep_prefix
+ # note: PARENT of the build process adds the new package to
+ # the database, so that we don't need to re-read from file.
+ spack.store.db.add(
+ self.spec, spack.store.layout, explicit=explicit
+ )
+ except directory_layout.InstallDirectoryAlreadyExistsError:
+ # Abort install if install directory exists.
+ # But do NOT remove it (you'd be overwriting someone else's stuff)
+ tty.warn("Keeping existing install prefix in place.")
+ raise
+ except StopIteration as e:
+ # A StopIteration exception means that do_install
+ # was asked to stop early from clients
+ tty.msg(e.message)
+ tty.msg(
+ 'Package stage directory : {0}'.format(self.stage.source_path)
+ )
+ finally:
+ # Remove the install prefix if anything went wrong during install.
if not keep_prefix:
self.remove_prefix()
- else:
- tty.warn("Keeping install prefix in place despite error.",
- "Spack will think this package is installed. " +
- "Manually remove this directory to fix:",
- self.prefix,
- wrap=True)
- raise
- # note: PARENT of the build process adds the new package to
- # the database, so that we don't need to re-read from file.
- spack.installed_db.add(self.spec, self.prefix, explicit=explicit)
+ def _do_install_pop_kwargs(self, kwargs):
+ """Pops kwargs from do_install before starting the installation
+
+ Args:
+ kwargs:
+ 'stop_at': last installation phase to be executed (or None)
+
+ """
+ self.last_phase = kwargs.pop('stop_at', None)
+ if self.last_phase is not None and self.last_phase not in self.phases:
+ tty.die('\'{0}\' is not an allowed phase for package {1}'
+ .format(self.last_phase, self.name))
+
+ def log(self):
+ # Copy provenance into the install directory on success
+ log_install_path = spack.store.layout.build_log_path(
+ self.spec)
+ env_install_path = spack.store.layout.build_env_path(
+ self.spec)
+ packages_dir = spack.store.layout.build_packages_path(
+ self.spec)
+
+ # Remove first if we're overwriting another build
+ # (can happen with spack setup)
+ try:
+ # log_install_path and env_install_path are inside this
+ shutil.rmtree(packages_dir)
+ except Exception:
+ # FIXME : this potentially catches too many things...
+ pass
+
+ install(self.log_path, log_install_path)
+ install(self.env_path, env_install_path)
+ dump_packages(self.spec, packages_dir)
def sanity_check_prefix(self):
"""This function checks whether install succeeded."""
@@ -1021,20 +1358,16 @@ class Package(object):
check_paths(self.sanity_check_is_dir, 'directory', os.path.isdir)
installed = set(os.listdir(self.prefix))
- installed.difference_update(spack.install_layout.hidden_file_paths)
+ installed.difference_update(
+ spack.store.layout.hidden_file_paths)
if not installed:
raise InstallError(
"Install failed for %s. Nothing was installed!" % self.name)
- def do_install_dependencies(self, **kwargs):
- # Pass along paths of dependencies here
- for dep in self.spec.dependencies.values():
- dep.package.do_install(**kwargs)
-
@property
def build_log_path(self):
if self.installed:
- return spack.install_layout.build_log_path(self.spec)
+ return spack.store.layout.build_log_path(self.spec)
else:
return join_path(self.stage.source_path, 'spack-build.out')
@@ -1081,20 +1414,20 @@ class Package(object):
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
"""Set up the environment of packages that depend on this one.
- This is similar to `setup_environment`, but it is used to
+ This is similar to ``setup_environment``, but it is used to
modify the compile and runtime environments of packages that
*depend* on this one. This gives packages like Python and
others that follow the extension model a way to implement
common environment or compile-time settings for dependencies.
- By default, this delegates to self.setup_environment()
+ By default, this delegates to ``self.setup_environment()``
- Example :
+ Example:
1. Installing python modules generally requires
- `PYTHONPATH` to point to the lib/pythonX.Y/site-packages
- directory in the module's install prefix. This could
- set that variable.
+ `PYTHONPATH` to point to the lib/pythonX.Y/site-packages
+ directory in the module's install prefix. This could
+ set that variable.
Args:
@@ -1113,7 +1446,6 @@ class Package(object):
This is useful if there are some common steps to installing
all extensions for a certain package.
-
"""
self.setup_environment(spack_env, run_env)
@@ -1161,28 +1493,30 @@ class Package(object):
"""
pass
- def install(self, spec, prefix):
- """
- Package implementations override this with their own configuration
- """
- raise InstallError("Package %s provides no install method!" %
- self.name)
-
def do_uninstall(self, force=False):
if not self.installed:
- raise InstallError(str(self.spec) + " is not installed.")
+ # prefix may not exist, but DB may be inconsistent. Try to fix by
+ # removing, but omit hooks.
+ specs = spack.store.db.query(self.spec, installed=True)
+ if specs:
+ spack.store.db.remove(specs[0])
+ tty.msg("Removed stale DB entry for %s" % self.spec.short_spec)
+ return
+ else:
+ raise InstallError(str(self.spec) + " is not installed.")
if not force:
- dependents = self.installed_dependents
+ dependents = spack.store.db.installed_dependents(self.spec)
if dependents:
raise PackageStillNeededError(self.spec, dependents)
# Pre-uninstall hook runs first.
- spack.hooks.pre_uninstall(self)
-
- # Uninstalling in Spack only requires removing the prefix.
- self.remove_prefix()
- spack.installed_db.remove(self.spec)
+ with self._prefix_write_lock():
+ spack.hooks.pre_uninstall(self)
+ # Uninstalling in Spack only requires removing the prefix.
+ self.remove_prefix()
+ #
+ spack.store.db.remove(self.spec)
tty.msg("Successfully uninstalled %s" % self.spec.short_spec)
# Once everything else is done, run post install hooks
@@ -1209,26 +1543,26 @@ class Package(object):
(self.name, self.extendee.name))
def do_activate(self, force=False):
- """Called on an etension to invoke the extendee's activate method.
+ """Called on an extension to invoke the extendee's activate method.
Commands should call this routine, and should not call
activate() directly.
"""
self._sanity_check_extension()
- spack.install_layout.check_extension_conflict(self.extendee_spec,
- self.spec)
+ spack.store.layout.check_extension_conflict(
+ self.extendee_spec, self.spec)
# Activate any package dependencies that are also extensions.
if not force:
- for spec in self.spec.traverse(root=False):
+ for spec in self.spec.traverse(root=False, deptype='run'):
if spec.package.extends(self.extendee_spec):
if not spec.package.activated:
spec.package.do_activate(force=force)
self.extendee_spec.package.activate(self, **self.extendee_args)
- spack.install_layout.add_extension(self.extendee_spec, self.spec)
+ spack.store.layout.add_extension(self.extendee_spec, self.spec)
tty.msg("Activated extension %s for %s" %
(self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
@@ -1243,7 +1577,7 @@ class Package(object):
"""
def ignore(filename):
- return (filename in spack.install_layout.hidden_file_paths or
+ return (filename in spack.store.layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
@@ -1261,25 +1595,28 @@ class Package(object):
# Allow a force deactivate to happen. This can unlink
# spurious files if something was corrupted.
if not force:
- spack.install_layout.check_activated(self.extendee_spec, self.spec)
+ spack.store.layout.check_activated(
+ self.extendee_spec, self.spec)
- activated = spack.install_layout.extension_map(self.extendee_spec)
+ activated = spack.store.layout.extension_map(
+ self.extendee_spec)
for name, aspec in activated.items():
if aspec == self.spec:
continue
- for dep in aspec.traverse():
+ for dep in aspec.traverse(deptype='run'):
if self.spec == dep:
+ msg = ("Cannot deactivate %s because %s is activated "
+ "and depends on it.")
raise ActivationError(
- "Cannot deactivate %s because %s is activated and depends on it." # NOQA: ignore=E501
- % (self.spec.short_spec, aspec.short_spec))
+ msg % (self.spec.short_spec, aspec.short_spec))
self.extendee_spec.package.deactivate(self, **self.extendee_args)
# redundant activation check -- makes SURE the spec is not
# still activated even if something was wrong above.
if self.activated:
- spack.install_layout.remove_extension(self.extendee_spec,
- self.spec)
+ spack.store.layout.remove_extension(
+ self.extendee_spec, self.spec)
tty.msg("Deactivated extension %s for %s" %
(self.spec.short_spec, self.extendee_spec.format("$_$@$+$%@")))
@@ -1295,7 +1632,7 @@ class Package(object):
"""
def ignore(filename):
- return (filename in spack.install_layout.hidden_file_paths or
+ return (filename in spack.store.layout.hidden_file_paths or
kwargs.get('ignore', lambda f: False)(filename))
tree = LinkTree(extension.prefix)
@@ -1353,9 +1690,10 @@ class Package(object):
def rpath(self):
"""Get the rpath this package links with, as a list of paths."""
rpaths = [self.prefix.lib, self.prefix.lib64]
- rpaths.extend(d.prefix.lib for d in self.spec.traverse(root=False)
+ deps = self.spec.dependencies(deptype='link')
+ rpaths.extend(d.prefix.lib for d in deps
if os.path.isdir(d.prefix.lib))
- rpaths.extend(d.prefix.lib64 for d in self.spec.traverse(root=False)
+ rpaths.extend(d.prefix.lib64 for d in deps
if os.path.isdir(d.prefix.lib64))
return rpaths
@@ -1367,17 +1705,35 @@ class Package(object):
return " ".join("-Wl,-rpath,%s" % p for p in self.rpath)
+class Package(PackageBase):
+ phases = ['install']
+ # To be used in UI queries that require to know which
+ # build-system class we are using
+ build_system_class = 'Package'
+ # This will be used as a registration decorator in user
+ # packages, if need be
+ PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)
+
+
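A hedged sketch of how a build-system class layered on PackageBase might look,
mirroring the one-phase Package class above. The class name, the phase names
and the `make` helper (assumed to be injected into package modules by Spack's
build environment) are illustrative, not taken from this change:

    class ExampleMakePackage(PackageBase):
        """Hypothetical two-phase build system: 'build', then 'install'."""
        phases = ['build', 'install']
        # Reported in UI queries that need the build-system class name.
        build_system_class = 'ExampleMakePackage'
        # Re-use the prefix sanity check after the final phase.
        PackageBase.sanity_check('install')(PackageBase.sanity_check_prefix)

        def build(self, spec, prefix):
            make()              # assumed injected by build_environment.fork()

        def install(self, spec, prefix):
            make('install')

    # do_install() runs the phases in order; the new 'stop_at' keyword lets a
    # client stop early, e.g. pkg.do_install(stop_at='build').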
def install_dependency_symlinks(pkg, spec, prefix):
"""Execute a dummy install and flatten dependencies"""
flatten_dependencies(spec, prefix)
+def use_cray_compiler_names():
+ """Compiler names for builds that rely on cray compiler names."""
+ os.environ['CC'] = 'cc'
+ os.environ['CXX'] = 'CC'
+ os.environ['FC'] = 'ftn'
+ os.environ['F77'] = 'ftn'
+
+
def flatten_dependencies(spec, flat_dir):
"""Make each dependency of spec present in dir via symlink."""
for dep in spec.traverse(root=False):
name = dep.name
- dep_path = spack.install_layout.path_for_spec(dep)
+ dep_path = spack.store.layout.path_for_spec(dep)
dep_files = LinkTree(dep_path)
os.mkdir(flat_dir + '/' + name)
@@ -1389,16 +1745,6 @@ def flatten_dependencies(spec, flat_dir):
dep_files.merge(flat_dir + '/' + name)
-def validate_package_url(url_string):
- """Determine whether spack can handle a particular URL or not."""
- url = urlparse(url_string)
- if url.scheme not in _ALLOWED_URL_SCHEMES:
- tty.die("Invalid protocol in URL: '%s'" % url_string)
-
- if not allowed_archive(url_string):
- tty.die("Invalid file type in URL: '%s'" % url_string)
-
-
def dump_packages(spec, path):
"""Dump all package information for a spec and its dependencies.
@@ -1416,7 +1762,7 @@ def dump_packages(spec, path):
if node is not spec:
# Locate the dependency package in the install tree and find
# its provenance information.
- source = spack.install_layout.build_packages_path(node)
+ source = spack.store.layout.build_packages_path(node)
source_repo_root = join_path(source, node.namespace)
# There's no provenance installed for the source package. Skip it.
@@ -1536,12 +1882,14 @@ class ExtensionError(PackageError):
class ExtensionConflictError(ExtensionError):
+
def __init__(self, path):
super(ExtensionConflictError, self).__init__(
"Extension blocked by file: %s" % path)
class ActivationError(ExtensionError):
+
def __init__(self, msg, long_msg=None):
super(ActivationError, self).__init__(msg, long_msg)
diff --git a/lib/spack/spack/package_prefs.py b/lib/spack/spack/package_prefs.py
new file mode 100644
index 0000000000..190647bb81
--- /dev/null
+++ b/lib/spack/spack/package_prefs.py
@@ -0,0 +1,314 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import spack
+import spack.error
+from spack.version import *
+
+
+def get_packages_config():
+ """Wrapper around get_packages_config() to validate semantics."""
+ config = spack.config.get_config('packages')
+
+ # Get a list of virtuals from packages.yaml. Note that because we
+ # check spack.repo, this collects virtuals that are actually provided
+ # by something, not just packages/names that don't exist.
+ # So, this won't include, e.g., 'all'.
+ virtuals = [(pkg_name, pkg_name._start_mark) for pkg_name in config
+ if spack.repo.is_virtual(pkg_name)]
+
+ # die if there are virtuals in `packages.yaml`
+ if virtuals:
+ errors = ["%s: %s" % (line_info, name) for name, line_info in virtuals]
+ raise VirtualInPackagesYAMLError(
+ "packages.yaml entries cannot be virtual packages:", *errors)
+
+ return config
+
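For illustration only (package names and keys are assumed, not part of this
change), the kind of configuration this check rejects versus accepts, written
as the dict that get_packages_config() returns:

    rejected = {'mpi': {'version': ['3.0']}}   # 'mpi' is virtual
    accepted = {'all': {'providers': {'mpi': ['mvapich2', 'openmpi']}}}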
+
+class PreferredPackages(object):
+ def __init__(self):
+ self.preferred = get_packages_config()
+ self._spec_for_pkgname_cache = {}
+
+ # Given a package name, sort component (e.g, version, compiler, ...), and
+ # a second_key (used by providers), return the list
+ def _order_for_package(self, pkgname, component, second_key,
+ test_all=True):
+ pkglist = [pkgname]
+ if test_all:
+ pkglist.append('all')
+ for pkg in pkglist:
+ order = self.preferred.get(pkg, {}).get(component, {})
+ if isinstance(order, dict) and second_key:
+ order = order.get(second_key, {})
+ if not order:
+ continue
+ return [str(s).strip() for s in order]
+ return []
+
+ # A generic sorting function. Given a package name and sort
+ # component, return less-than-0, 0, or greater-than-0 if
+ # a is respectively less-than, equal to, or greater than b.
+ def _component_compare(self, pkgname, component, a, b,
+ reverse_natural_compare, second_key):
+ if a is None:
+ return -1
+ if b is None:
+ return 1
+ orderlist = self._order_for_package(pkgname, component, second_key)
+ a_in_list = str(a) in orderlist
+ b_in_list = str(b) in orderlist
+ if a_in_list and not b_in_list:
+ return -1
+ elif b_in_list and not a_in_list:
+ return 1
+
+ cmp_a = None
+ cmp_b = None
+ reverse = None
+ if not a_in_list and not b_in_list:
+ cmp_a = a
+ cmp_b = b
+ reverse = -1 if reverse_natural_compare else 1
+ else:
+ cmp_a = orderlist.index(str(a))
+ cmp_b = orderlist.index(str(b))
+ reverse = 1
+
+ if cmp_a < cmp_b:
+ return -1 * reverse
+ elif cmp_a > cmp_b:
+ return 1 * reverse
+ else:
+ return 0
+
+ # A sorting function for specs. Similar to component_compare, but
+ # a and b are considered to match entries in the sorting list if they
+ # satisfy the list component.
+ def _spec_compare(self, pkgname, component, a, b,
+ reverse_natural_compare, second_key):
+ if not a or (not a.concrete and not second_key):
+ return -1
+ if not b or (not b.concrete and not second_key):
+ return 1
+ specs = self._spec_for_pkgname(pkgname, component, second_key)
+ a_index = None
+ b_index = None
+ reverse = -1 if reverse_natural_compare else 1
+ for i, cspec in enumerate(specs):
+ if a_index is None and (cspec.satisfies(a) or a.satisfies(cspec)):
+ a_index = i
+ if b_index:
+ break
+ if b_index is None and (cspec.satisfies(b) or b.satisfies(cspec)):
+ b_index = i
+ if a_index:
+ break
+
+ if a_index is not None and b_index is None:
+ return -1
+ elif a_index is None and b_index is not None:
+ return 1
+ elif a_index is not None and b_index == a_index:
+ return -1 * cmp(a, b)
+ elif (a_index is not None and b_index is not None and
+ a_index != b_index):
+ return cmp(a_index, b_index)
+ else:
+ return cmp(a, b) * reverse
+
+ # Given a sort order specified by the pkgname/component/second_key, return
+ # a list of CompilerSpecs, VersionLists, or Specs for that sorting list.
+ def _spec_for_pkgname(self, pkgname, component, second_key):
+ key = (pkgname, component, second_key)
+ if key not in self._spec_for_pkgname_cache:
+ pkglist = self._order_for_package(pkgname, component, second_key)
+ if component == 'compiler':
+ self._spec_for_pkgname_cache[key] = \
+ [spack.spec.CompilerSpec(s) for s in pkglist]
+ elif component == 'version':
+ self._spec_for_pkgname_cache[key] = \
+ [VersionList(s) for s in pkglist]
+ else:
+ self._spec_for_pkgname_cache[key] = \
+ [spack.spec.Spec(s) for s in pkglist]
+ return self._spec_for_pkgname_cache[key]
+
+ def provider_compare(self, pkgname, provider_str, a, b):
+ """Return less-than-0, 0, or greater than 0 if a is respecively
+ less-than, equal-to, or greater-than b. A and b are possible
+ implementations of provider_str. One provider is less-than another
+ if it is preferred over the other. For example,
+ provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would
+ return -1 if mvapich should be preferred over openmpi for scorep."""
+ return self._spec_compare(pkgname, 'providers', a, b, False,
+ provider_str)
+
+ def spec_has_preferred_provider(self, pkgname, provider_str):
+ """Return True iff the named package has a list of preferred
+ providers"""
+ return bool(self._order_for_package(pkgname, 'providers',
+ provider_str, False))
+
+ def spec_preferred_variants(self, pkgname):
+ """Return a VariantMap of preferred variants and their values"""
+ for pkg in (pkgname, 'all'):
+ variants = self.preferred.get(pkg, {}).get('variants', '')
+ if variants:
+ break
+ if not isinstance(variants, basestring):
+ variants = " ".join(variants)
+ pkg = spack.repo.get(pkgname)
+ spec = spack.spec.Spec("%s %s" % (pkgname, variants))
+ # Only return variants that are actually supported by the package
+ return dict((name, variant) for name, variant in spec.variants.items()
+ if name in pkg.variants)
+
+ def version_compare(self, pkgname, a, b):
+ """Return less-than-0, 0, or greater than 0 if version a of pkgname is
+ respectively less-than, equal-to, or greater-than version b of
+ pkgname. One version is less-than another if it is preferred over
+ the other."""
+ return self._spec_compare(pkgname, 'version', a, b, True, None)
+
+ def variant_compare(self, pkgname, a, b):
+ """Return less-than-0, 0, or greater than 0 if variant a of pkgname is
+ respectively less-than, equal-to, or greater-than variant b of
+ pkgname. One variant is less-than another if it is preferred over
+ the other."""
+ return self._component_compare(pkgname, 'variant', a, b, False, None)
+
+ def architecture_compare(self, pkgname, a, b):
+ """Return less-than-0, 0, or greater than 0 if architecture a of pkgname
+ is respectively less-than, equal-to, or greater-than architecture b
+ of pkgname. One architecture is less-than another if it is preferred
+ over the other."""
+ return self._component_compare(pkgname, 'architecture', a, b,
+ False, None)
+
+ def compiler_compare(self, pkgname, a, b):
+ """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is
+ respectively less-than, equal-to, or greater-than compiler b of
+ pkgname. One compiler is less-than another if it is preferred over
+ the other."""
+ return self._spec_compare(pkgname, 'compiler', a, b, False, None)
+
+
+def spec_externals(spec):
+ """Return a list of external specs (with external directory path filled in),
+ one for each known external installation."""
+ # break circular import.
+ from spack.build_environment import get_path_from_module
+
+ allpkgs = get_packages_config()
+ name = spec.name
+
+ external_specs = []
+ pkg_paths = allpkgs.get(name, {}).get('paths', None)
+ pkg_modules = allpkgs.get(name, {}).get('modules', None)
+ if (not pkg_paths) and (not pkg_modules):
+ return []
+
+ for external_spec, path in pkg_paths.iteritems():
+ if not path:
+ # skip entries without paths (avoid creating extra Specs)
+ continue
+
+ external_spec = spack.spec.Spec(external_spec, external=path)
+ if external_spec.satisfies(spec):
+ external_specs.append(external_spec)
+
+ for external_spec, module in pkg_modules.iteritems():
+ if not module:
+ continue
+
+ path = get_path_from_module(module)
+
+ external_spec = spack.spec.Spec(
+ external_spec, external=path, external_module=module)
+ if external_spec.satisfies(spec):
+ external_specs.append(external_spec)
+
+ return external_specs
+
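For reference, a hedged sketch of the per-package entry that spec_externals()
walks, written as the Python dict get_packages_config() would return; the spec
strings, path and module name are placeholders:

    externals_example = {
        'openmpi': {
            'buildable': False,   # consumed by is_spec_buildable() below
            'paths': {'openmpi@1.10.2 %gcc@4.9.3': '/opt/openmpi/1.10.2'},
            'modules': {'openmpi@2.0.0 %gcc@4.9.3': 'openmpi/2.0.0'},
        }
    }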
+
+def is_spec_buildable(spec):
+ """Return true if the spec pkgspec is configured as buildable"""
+ allpkgs = get_packages_config()
+ if spec.name not in allpkgs:
+ return True
+ if 'buildable' not in allpkgs[spec.name]:
+ return True
+ return allpkgs[spec.name]['buildable']
+
+
+def cmp_specs(lhs, rhs):
+ # Package name sort order is not configurable, always goes alphabetical
+ if lhs.name != rhs.name:
+ return cmp(lhs.name, rhs.name)
+
+ # Package version is second in compare order
+ pkgname = lhs.name
+ if lhs.versions != rhs.versions:
+ return pkgsort().version_compare(
+ pkgname, lhs.versions, rhs.versions)
+
+ # Compiler is third
+ if lhs.compiler != rhs.compiler:
+ return pkgsort().compiler_compare(
+ pkgname, lhs.compiler, rhs.compiler)
+
+ # Variants
+ if lhs.variants != rhs.variants:
+ return pkgsort().variant_compare(
+ pkgname, lhs.variants, rhs.variants)
+
+ # Architecture
+ if lhs.architecture != rhs.architecture:
+ return pkgsort().architecture_compare(
+ pkgname, lhs.architecture, rhs.architecture)
+
+ # Dependency is not configurable
+ lhash, rhash = hash(lhs), hash(rhs)
+ if lhash != rhash:
+ return -1 if lhash < rhash else 1
+
+ # Equal specs
+ return 0
+
+
+_pkgsort = None
+
+
+def pkgsort():
+ global _pkgsort
+ if _pkgsort is None:
+ _pkgsort = PreferredPackages()
+ return _pkgsort
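A minimal usage sketch of the preference API above, assuming a working Spack
checkout on sys.path and a packages.yaml that lists provider preferences for
'scorep'; the spec names are illustrative:

    import spack.spec
    from spack.package_prefs import pkgsort, cmp_specs

    a = spack.spec.Spec('mvapich2')
    b = spack.spec.Spec('openmpi')
    # A negative result means 'a' is the preferred mpi provider for scorep.
    print(pkgsort().provider_compare('scorep', 'mpi', a, b))

    # cmp_specs() gives a full preference ordering over specs (Python 2 cmp):
    #     sorted(candidate_specs, cmp=cmp_specs)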
+
+
+class VirtualInPackagesYAMLError(spack.error.SpackError):
+ """Raised when a disallowed virtual is found in packages.yaml"""
diff --git a/lib/spack/spack/parse.py b/lib/spack/spack/parse.py
index 8adf957e7f..e116175823 100644
--- a/lib/spack/spack/parse.py
+++ b/lib/spack/spack/parse.py
@@ -23,12 +23,14 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import re
+import shlex
import itertools
import spack.error
class Token:
"""Represents tokens; generated from input by lexer and fed to parse()."""
+
def __init__(self, type, value='', start=0, end=0):
self.type = type
self.value = value
@@ -51,25 +53,65 @@ class Token:
class Lexer(object):
"""Base class for Lexers that keep track of line numbers."""
- def __init__(self, lexicon):
- self.scanner = re.Scanner(lexicon)
+
+ def __init__(self, lexicon0, mode_switches_01=[],
+ lexicon1=[], mode_switches_10=[]):
+ self.scanner0 = re.Scanner(lexicon0)
+ self.mode_switches_01 = mode_switches_01
+ self.scanner1 = re.Scanner(lexicon1)
+ self.mode_switches_10 = mode_switches_10
+ self.mode = 0
def token(self, type, value=''):
- return Token(type, value, self.scanner.match.start(0), self.scanner.match.end(0))
+ if self.mode == 0:
+ return Token(type, value,
+ self.scanner0.match.start(0),
+ self.scanner0.match.end(0))
+ else:
+ return Token(type, value,
+ self.scanner1.match.start(0),
+ self.scanner1.match.end(0))
+
+ def lex_word(self, word):
+ scanner = self.scanner0
+ mode_switches = self.mode_switches_01
+ if self.mode == 1:
+ scanner = self.scanner1
+ mode_switches = self.mode_switches_10
+
+ tokens, remainder = scanner.scan(word)
+ remainder_used = 0
+
+ for i, t in enumerate(tokens):
+ if t.type in mode_switches:
+ # Combine post-switch tokens with remainder and
+ # scan in other mode
+ self.mode = 1 - self.mode # swap 0/1
+ remainder_used = 1
+ tokens = tokens[:i + 1] + self.lex_word(
+ word[word.index(t.value) + len(t.value):])
+ break
+
+ if remainder and not remainder_used:
+ raise LexError("Invalid character", word, word.index(remainder))
- def lex(self, text):
- tokens, remainder = self.scanner.scan(text)
- if remainder:
- raise LexError("Invalid character", text, text.index(remainder))
return tokens
+ def lex(self, text):
+ lexed = []
+ for word in text:
+ tokens = self.lex_word(word)
+ lexed.extend(tokens)
+ return lexed
+
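A self-contained sketch of the two-mode scanner above; the token names and
patterns are invented for illustration (Spack's real lexicon lives in
spec.py). Mode 0 lexes identifiers, a '=' token switches to mode 1, which
accepts arbitrary flag values such as '-O2':

    from spack.parse import Lexer

    ID, EQ, VAL = 'id', 'eq', 'val'

    class MiniLexer(Lexer):
        def __init__(self):
            super(MiniLexer, self).__init__(
                lexicon0=[(r'\w+', lambda scanner, v: self.token(ID, v)),
                          (r'=', lambda scanner, v: self.token(EQ, v))],
                mode_switches_01=[EQ],
                lexicon1=[(r'\S+', lambda scanner, v: self.token(VAL, v))],
                mode_switches_10=[])

    tokens = MiniLexer().lex(['cflags=-O2'])   # -> [ID, EQ, VAL] tokens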
class Parser(object):
"""Base class for simple recursive descent parsers."""
+
def __init__(self, lexer):
- self.tokens = iter([]) # iterators over tokens, handled in order. Starts empty.
- self.token = Token(None) # last accepted token starts at beginning of file
- self.next = None # next token
+ self.tokens = iter([]) # iterators over tokens, handled in order.
+ self.token = Token(None) # last accepted token
+ self.next = None # next token
self.lexer = lexer
self.text = None
@@ -82,11 +124,12 @@ class Parser(object):
def push_tokens(self, iterable):
"""Adds all tokens in some iterable to the token stream."""
- self.tokens = itertools.chain(iter(iterable), iter([self.next]), self.tokens)
+ self.tokens = itertools.chain(
+ iter(iterable), iter([self.next]), self.tokens)
self.gettok()
def accept(self, id):
- """Puts the next symbol in self.token if we like it. Then calls gettok()"""
+ """Put the next symbol in self.token if accepted, then call gettok()"""
if self.next and self.next.is_a(id):
self.token = self.next
self.gettok()
@@ -116,6 +159,8 @@ class Parser(object):
sys.exit(1)
def setup(self, text):
+ if isinstance(text, basestring):
+ text = shlex.split(text)
self.text = text
self.push_tokens(self.lexer.lex(text))
@@ -124,9 +169,9 @@ class Parser(object):
return self.do_parse()
-
class ParseError(spack.error.SpackError):
"""Raised when we don't hit an error while parsing."""
+
def __init__(self, message, string, pos):
super(ParseError, self).__init__(message)
self.string = string
@@ -135,5 +180,6 @@ class ParseError(spack.error.SpackError):
class LexError(ParseError):
"""Raised when we don't know how to lex something."""
+
def __init__(self, message, string, pos):
super(LexError, self).__init__(message, string, pos)
diff --git a/lib/spack/spack/patch.py b/lib/spack/spack/patch.py
index c2e181be2f..ee83748319 100644
--- a/lib/spack/spack/patch.py
+++ b/lib/spack/spack/patch.py
@@ -24,69 +24,111 @@
##############################################################################
import os
-import llnl.util.tty as tty
-from llnl.util.filesystem import join_path
-
import spack
-import spack.stage
import spack.error
+import spack.stage
+import spack.fetch_strategy as fs
+from llnl.util.filesystem import join_path
from spack.util.executable import which
-# Patch tool for patching archives.
-_patch = which("patch", required=True)
-
class Patch(object):
- """This class describes a patch to be applied to some expanded
- source code."""
+ """Base class to describe a patch that needs to be applied to some
+ expanded source code.
+ """
+
+ @staticmethod
+ def create(pkg, path_or_url, level, **kwargs):
+ """
+ Factory method that creates an instance of some class derived from
+ Patch
+
+ Args:
+ pkg: package that needs to be patched
+ path_or_url: path or url where the patch is found
+ level: patch level
+
+ Returns:
+ instance of some Patch class
+ """
+ # Check if we are dealing with a URL
+ if '://' in path_or_url:
+ return UrlPatch(pkg, path_or_url, level, **kwargs)
+ # Assume patches are stored in the repository
+ return FilePatch(pkg, path_or_url, level)
def __init__(self, pkg, path_or_url, level):
- self.pkg_name = pkg.name
+ # Check on level (must be an integer > 0)
+ if not isinstance(level, int) or not level >= 0:
+ raise ValueError("Patch level needs to be a non-negative integer.")
+ # Attributes shared by all patch subclasses
self.path_or_url = path_or_url
- self.path = None
- self.url = None
self.level = level
+ # self.path needs to be computed by derived classes
+ # before a call to apply
+ self.path = None
if not isinstance(self.level, int) or not self.level >= 0:
raise ValueError("Patch level needs to be a non-negative integer.")
- if '://' in path_or_url:
- self.url = path_or_url
- else:
- pkg_dir = spack.repo.dirname_for_package_name(self.pkg_name)
- self.path = join_path(pkg_dir, path_or_url)
- if not os.path.isfile(self.path):
- raise NoSuchPatchFileError(pkg_name, self.path)
-
-
def apply(self, stage):
- """Fetch this patch, if necessary, and apply it to the source
- code in the supplied stage.
+ """Apply the patch at self.path to the source code in the
+ supplied stage
+
+ Args:
+ stage: stage for the package that needs to be patched
"""
stage.chdir_to_source()
+ # Use -N to allow the same patches to be applied multiple times.
+ _patch = which("patch", required=True)
+ _patch('-s', '-p', str(self.level), '-i', self.path)
- patch_stage = None
- try:
- if self.url:
- # use an anonymous stage to fetch the patch if it is a URL
- patch_stage = spack.stage.Stage(self.url)
- patch_stage.fetch()
- patch_file = patch_stage.archive_file
- else:
- patch_file = self.path
- # Use -N to allow the same patches to be applied multiple times.
- _patch('-s', '-p', str(self.level), '-i', patch_file)
+class FilePatch(Patch):
+ """Describes a patch that is retrieved from a file in the repository"""
+ def __init__(self, pkg, path_or_url, level):
+ super(FilePatch, self).__init__(pkg, path_or_url, level)
- finally:
- if patch_stage:
- patch_stage.destroy()
+ pkg_dir = spack.repo.dirname_for_package_name(pkg.name)
+ self.path = join_path(pkg_dir, path_or_url)
+ if not os.path.isfile(self.path):
+ raise NoSuchPatchFileError(pkg.name, self.path)
+class UrlPatch(Patch):
+ """Describes a patch that is retrieved from a URL"""
+ def __init__(self, pkg, path_or_url, level, **kwargs):
+ super(UrlPatch, self).__init__(pkg, path_or_url, level)
+ self.url = path_or_url
+ self.md5 = kwargs.get('md5')
+
+ def apply(self, stage):
+ """Retrieve the patch in a temporary stage, computes
+ self.path and calls `super().apply(stage)`
+
+ Args:
+ stage: stage for the package that needs to be patched
+ """
+ fetcher = fs.URLFetchStrategy(self.url, digest=self.md5)
+ mirror = join_path(
+ os.path.dirname(stage.mirror_path),
+ os.path.basename(self.url)
+ )
+ with spack.stage.Stage(fetcher, mirror_path=mirror) as patch_stage:
+ patch_stage.fetch()
+ patch_stage.check()
+ patch_stage.cache_local()
+ patch_stage.expand_archive()
+ self.path = os.path.abspath(
+ os.listdir(patch_stage.path).pop()
+ )
+ super(UrlPatch, self).apply(stage)
+
class NoSuchPatchFileError(spack.error.SpackError):
"""Raised when user specifies a patch file that doesn't exist."""
+
def __init__(self, package, path):
super(NoSuchPatchFileError, self).__init__(
"No such patch file for package %s: %s" % (package, path))
diff --git a/lib/spack/spack/platforms/__init__.py b/lib/spack/spack/platforms/__init__.py
new file mode 100644
index 0000000000..ed1ec23bca
--- /dev/null
+++ b/lib/spack/spack/platforms/__init__.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
diff --git a/lib/spack/spack/platforms/bgq.py b/lib/spack/spack/platforms/bgq.py
new file mode 100644
index 0000000000..8ff33dd418
--- /dev/null
+++ b/lib/spack/spack/platforms/bgq.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnk import Cnk
+
+
+class Bgq(Platform):
+ priority = 30
+ front_end = 'power7'
+ back_end = 'ppc64'
+ default = 'ppc64'
+
+ def __init__(self):
+ ''' IBM Blue Gene/Q system platform.'''
+
+ super(Bgq, self).__init__('bgq')
+
+ self.add_target(self.front_end, Target(self.front_end))
+ self.add_target(self.back_end, Target(self.back_end))
+
+ front_distro = LinuxDistro()
+ back_distro = Cnk()
+
+ self.front_os = str(front_distro)
+ self.back_os = str(back_distro)
+ self.default_os = self.back_os
+
+ self.add_operating_system(str(front_distro), front_distro)
+ self.add_operating_system(str(back_distro), back_distro)
+
+ @classmethod
+ def detect(self):
+ return os.path.exists('/bgsys')
diff --git a/lib/spack/spack/platforms/cray.py b/lib/spack/spack/platforms/cray.py
new file mode 100644
index 0000000000..1cd08e5565
--- /dev/null
+++ b/lib/spack/spack/platforms/cray.py
@@ -0,0 +1,151 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import re
+import llnl.util.tty as tty
+from spack import build_env_path
+from spack.util.executable import which
+from spack.architecture import Platform, Target, NoPlatformError
+from spack.operating_systems.linux_distro import LinuxDistro
+from spack.operating_systems.cnl import Cnl
+from llnl.util.filesystem import join_path
+
+
+def _get_modules_in_modulecmd_output(output):
+ '''Return list of valid modules parsed from modulecmd output string.'''
+ return [i for i in output.splitlines()
+ if len(i.split()) == 1]
+
+
+def _fill_craype_targets_from_modules(targets, modules):
+ '''Extend CrayPE CPU targets list with those found in list of modules.'''
+ # Craype- module prefixes that are not valid CPU targets.
+ non_targets = ('hugepages', 'network', 'target', 'accel', 'xtpe')
+ pattern = r'craype-(?!{0})(\S*)'.format('|'.join(non_targets))
+ for mod in modules:
+ if 'craype-' in mod:
+ targets.extend(re.findall(pattern, mod))
+
+
+class Cray(Platform):
+ priority = 10
+
+ def __init__(self):
+ ''' Create a Cray system platform.
+
+ Target names should use craype target names but not include the
+ 'craype-' prefix. Uses first viable target from:
+ self
+ envars [SPACK_FRONT_END, SPACK_BACK_END]
+ configuration file "targets.yaml" with keys 'front_end', 'back_end'
+ scanning /etc/bash/bashrc.local for back_end only
+ '''
+ super(Cray, self).__init__('cray')
+
+ # Make all craype targets available.
+ for target in self._avail_targets():
+ name = target.replace('-', '_')
+ self.add_target(name, Target(name, 'craype-%s' % target))
+
+ # Get aliased targets from config or best guess from environment:
+ for name in ('front_end', 'back_end'):
+ _target = getattr(self, name, None)
+ if _target is None:
+ _target = os.environ.get('SPACK_' + name.upper())
+ if _target is None and name == 'back_end':
+ _target = self._default_target_from_env()
+ if _target is not None:
+ safe_name = _target.replace('-', '_')
+ setattr(self, name, safe_name)
+ self.add_target(name, self.targets[safe_name])
+
+ if self.back_end is not None:
+ self.default = self.back_end
+ self.add_target('default', self.targets[self.back_end])
+ else:
+ raise NoPlatformError()
+
+ front_distro = LinuxDistro()
+ back_distro = Cnl()
+
+ self.default_os = str(back_distro)
+ self.back_os = self.default_os
+ self.front_os = str(front_distro)
+
+ self.add_operating_system(self.back_os, back_distro)
+ self.add_operating_system(self.front_os, front_distro)
+
+ @classmethod
+ def setup_platform_environment(cls, pkg, env):
+ """ Change the linker to default dynamic to be more
+ similar to linux/standard linker behavior
+ """
+ env.set('CRAYPE_LINK_TYPE', 'dynamic')
+ cray_wrapper_names = join_path(build_env_path, 'cray')
+ if os.path.isdir(cray_wrapper_names):
+ env.prepend_path('PATH', cray_wrapper_names)
+ env.prepend_path('SPACK_ENV_PATH', cray_wrapper_names)
+
+ @classmethod
+ def detect(cls):
+ return os.environ.get('CRAYPE_VERSION') is not None
+
+ def _default_target_from_env(self):
+ '''Set and return the default CrayPE target loaded in a clean login
+ session.
+
+ A bash subshell is launched with a wiped environment and the list of
+ loaded modules is parsed for the first acceptable CrayPE target.
+ '''
+ # Based on the incantation:
+ # echo "$(env - USER=$USER /bin/bash -l -c 'module list -lt')"
+ if getattr(self, 'default', None) is None:
+ env = which('env')
+ env.add_default_arg('-')
+ # CAUTION - $USER is generally needed in the sub-environment.
+ # There may be other variables needed for general success.
+ output = env('USER=%s' % os.environ['USER'],
+ 'HOME=%s' % os.environ['HOME'],
+ '/bin/bash', '--noprofile', '--norc', '-c',
+ '. /etc/profile; module list -lt',
+ output=str, error=str)
+ self._defmods = _get_modules_in_modulecmd_output(output)
+ targets = []
+ _fill_craype_targets_from_modules(targets, self._defmods)
+ self.default = targets[0] if targets else None
+ tty.debug("Found default modules:",
+ *[" %s" % mod for mod in self._defmods])
+ return self.default
+
+ def _avail_targets(self):
+ '''Return a list of available CrayPE CPU targets.'''
+ if getattr(self, '_craype_targets', None) is None:
+ module = which('modulecmd', required=True)
+ module.add_default_arg('python')
+ output = module('avail', '-t', 'craype-', output=str, error=str)
+ craype_modules = _get_modules_in_modulecmd_output(output)
+ self._craype_targets = targets = []
+ _fill_craype_targets_from_modules(targets, craype_modules)
+ return self._craype_targets
diff --git a/lib/spack/spack/platforms/darwin.py b/lib/spack/spack/platforms/darwin.py
new file mode 100644
index 0000000000..3f6dc77655
--- /dev/null
+++ b/lib/spack/spack/platforms/darwin.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import platform
+from spack.architecture import Platform, Target
+from spack.operating_systems.mac_os import MacOs
+
+
+class Darwin(Platform):
+ priority = 89
+ front_end = 'x86_64'
+ back_end = 'x86_64'
+ default = 'x86_64'
+
+ def __init__(self):
+ super(Darwin, self).__init__('darwin')
+ self.add_target(self.default, Target(self.default))
+ mac_os = MacOs()
+
+ self.default_os = str(mac_os)
+ self.front_os = str(mac_os)
+ self.back_os = str(mac_os)
+
+ self.add_operating_system(str(mac_os), mac_os)
+
+ @classmethod
+ def detect(self):
+ return 'darwin' in platform.system().lower()
diff --git a/lib/spack/spack/platforms/linux.py b/lib/spack/spack/platforms/linux.py
new file mode 100644
index 0000000000..d7cdd643c0
--- /dev/null
+++ b/lib/spack/spack/platforms/linux.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import platform
+from spack.architecture import Platform, Target
+from spack.operating_systems.linux_distro import LinuxDistro
+
+
+class Linux(Platform):
+ priority = 90
+
+ def __init__(self):
+ super(Linux, self).__init__('linux')
+ self.add_target('x86_64', Target('x86_64'))
+ self.add_target('ppc64le', Target('ppc64le'))
+
+ self.default = platform.machine()
+ self.front_end = platform.machine()
+ self.back_end = platform.machine()
+
+ if self.default not in self.targets:
+ self.add_target(self.default, Target(self.default))
+
+ linux_dist = LinuxDistro()
+ self.default_os = str(linux_dist)
+ self.front_os = self.default_os
+ self.back_os = self.default_os
+ self.add_operating_system(str(linux_dist), linux_dist)
+
+ @classmethod
+ def detect(self):
+ return 'linux' in platform.system().lower()
diff --git a/lib/spack/spack/platforms/test.py b/lib/spack/spack/platforms/test.py
new file mode 100644
index 0000000000..a40e1f3b44
--- /dev/null
+++ b/lib/spack/spack/platforms/test.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack.architecture import Platform, Target
+from spack.architecture import OperatingSystem as OS
+
+
+class Test(Platform):
+ priority = 1000000
+ front_end = 'x86_32'
+ back_end = 'x86_64'
+ default = 'x86_64'
+
+ front_os = 'redhat6'
+ back_os = 'debian6'
+ default_os = 'debian6'
+
+ def __init__(self):
+ super(Test, self).__init__('test')
+ self.add_target(self.default, Target(self.default))
+ self.add_target(self.front_end, Target(self.front_end))
+
+ self.add_operating_system(self.default_os, OS('debian', 6))
+ self.add_operating_system(self.front_os, OS('redhat', 6))
+
+ @classmethod
+ def detect(self):
+ return True
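A hedged sketch of how these platform classes would typically be selected (the
real logic lives in spack.architecture, which is not shown in this diff),
assuming that the lowest 'priority' value wins among platforms whose detect()
returns True, as the numbers above suggest; Cray (10) would then shadow
Linux (90) on a Cray front-end:

    from spack.platforms.cray import Cray
    from spack.platforms.darwin import Darwin
    from spack.platforms.linux import Linux

    candidates = sorted([Cray, Darwin, Linux], key=lambda cls: cls.priority)
    host_platform = next(cls() for cls in candidates if cls.detect())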
diff --git a/lib/spack/spack/preferred_packages.py b/lib/spack/spack/preferred_packages.py
deleted file mode 100644
index 4820584150..0000000000
--- a/lib/spack/spack/preferred_packages.py
+++ /dev/null
@@ -1,175 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-
-import spack
-from spack.version import *
-
-class PreferredPackages(object):
- _default_order = {'compiler' : [ 'gcc', 'intel', 'clang', 'pgi', 'xlc' ] } # Arbitrary, but consistent
-
- def __init__(self):
- self.preferred = spack.config.get_config('packages')
- self._spec_for_pkgname_cache = {}
-
- # Given a package name, sort component (e.g, version, compiler, ...), and
- # a second_key (used by providers), return the list
- def _order_for_package(self, pkgname, component, second_key, test_all=True):
- pkglist = [pkgname]
- if test_all:
- pkglist.append('all')
- for pkg in pkglist:
- order = self.preferred.get(pkg, {}).get(component, {})
- if type(order) is dict:
- order = order.get(second_key, {})
- if not order:
- continue
- return [str(s).strip() for s in order]
- return []
-
-
- # A generic sorting function. Given a package name and sort
- # component, return less-than-0, 0, or greater-than-0 if
- # a is respectively less-than, equal to, or greater than b.
- def _component_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
- if a is None:
- return -1
- if b is None:
- return 1
- orderlist = self._order_for_package(pkgname, component, second_key)
- a_in_list = str(a) in orderlist
- b_in_list = str(b) in orderlist
- if a_in_list and not b_in_list:
- return -1
- elif b_in_list and not a_in_list:
- return 1
-
- cmp_a = None
- cmp_b = None
- reverse = None
- if not a_in_list and not b_in_list:
- cmp_a = a
- cmp_b = b
- reverse = -1 if reverse_natural_compare else 1
- else:
- cmp_a = orderlist.index(str(a))
- cmp_b = orderlist.index(str(b))
- reverse = 1
-
- if cmp_a < cmp_b:
- return -1 * reverse
- elif cmp_a > cmp_b:
- return 1 * reverse
- else:
- return 0
-
-
- # A sorting function for specs. Similar to component_compare, but
- # a and b are considered to match entries in the sorting list if they
- # satisfy the list component.
- def _spec_compare(self, pkgname, component, a, b, reverse_natural_compare, second_key):
- if not a or not a.concrete:
- return -1
- if not b or not b.concrete:
- return 1
- specs = self._spec_for_pkgname(pkgname, component, second_key)
- a_index = None
- b_index = None
- reverse = -1 if reverse_natural_compare else 1
- for i, cspec in enumerate(specs):
- if a_index == None and (cspec.satisfies(a) or a.satisfies(cspec)):
- a_index = i
- if b_index:
- break
- if b_index == None and (cspec.satisfies(b) or b.satisfies(cspec)):
- b_index = i
- if a_index:
- break
-
- if a_index != None and b_index == None: return -1
- elif a_index == None and b_index != None: return 1
- elif a_index != None and b_index == a_index: return -1 * cmp(a, b)
- elif a_index != None and b_index != None and a_index != b_index: return cmp(a_index, b_index)
- else: return cmp(a, b) * reverse
-
-
-
- # Given a sort order specified by the pkgname/component/second_key, return
- # a list of CompilerSpecs, VersionLists, or Specs for that sorting list.
- def _spec_for_pkgname(self, pkgname, component, second_key):
- key = (pkgname, component, second_key)
- if not key in self._spec_for_pkgname_cache:
- pkglist = self._order_for_package(pkgname, component, second_key)
- if not pkglist:
- if component in self._default_order:
- pkglist = self._default_order[component]
- if component == 'compiler':
- self._spec_for_pkgname_cache[key] = [spack.spec.CompilerSpec(s) for s in pkglist]
- elif component == 'version':
- self._spec_for_pkgname_cache[key] = [VersionList(s) for s in pkglist]
- else:
- self._spec_for_pkgname_cache[key] = [spack.spec.Spec(s) for s in pkglist]
- return self._spec_for_pkgname_cache[key]
-
-
- def provider_compare(self, pkgname, provider_str, a, b):
- """Return less-than-0, 0, or greater than 0 if a is respecively less-than, equal-to, or
- greater-than b. A and b are possible implementations of provider_str.
- One provider is less-than another if it is preferred over the other.
- For example, provider_compare('scorep', 'mpi', 'mvapich', 'openmpi') would return -1 if
- mvapich should be preferred over openmpi for scorep."""
- return self._spec_compare(pkgname, 'providers', a, b, False, provider_str)
-
-
- def spec_has_preferred_provider(self, pkgname, provider_str):
- """Return True iff the named package has a list of preferred provider"""
- return bool(self._order_for_package(pkgname, 'providers', provider_str, False))
-
-
- def version_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if version a of pkgname is
- respecively less-than, equal-to, or greater-than version b of pkgname.
- One version is less-than another if it is preferred over the other."""
- return self._spec_compare(pkgname, 'version', a, b, True, None)
-
-
- def variant_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if variant a of pkgname is
- respecively less-than, equal-to, or greater-than variant b of pkgname.
- One variant is less-than another if it is preferred over the other."""
- return self._component_compare(pkgname, 'variant', a, b, False, None)
-
-
- def architecture_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if architecture a of pkgname is
- respecively less-than, equal-to, or greater-than architecture b of pkgname.
- One architecture is less-than another if it is preferred over the other."""
- return self._component_compare(pkgname, 'architecture', a, b, False, None)
-
-
- def compiler_compare(self, pkgname, a, b):
- """Return less-than-0, 0, or greater than 0 if compiler a of pkgname is
- respecively less-than, equal-to, or greater-than compiler b of pkgname.
- One compiler is less-than another if it is preferred over the other."""
- return self._spec_compare(pkgname, 'compiler', a, b, False, None)
diff --git a/lib/spack/spack/provider_index.py b/lib/spack/spack/provider_index.py
new file mode 100644
index 0000000000..0e771c6255
--- /dev/null
+++ b/lib/spack/spack/provider_index.py
@@ -0,0 +1,302 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+The ``provider_index`` module contains utility classes for virtual dependencies.
+"""
+from itertools import product as iproduct
+from pprint import pformat
+
+import spack.util.spack_yaml as syaml
+from yaml.error import MarkedYAMLError
+
+import spack
+import spack.error
+
+
+class ProviderIndex(object):
+ """This is a dict of dicts used for finding providers of particular
+ virtual dependencies. The dict of dicts looks like:
+
+ { vpkg name :
+ { full vpkg spec : set(packages providing spec) } }
+
+ Callers can use this to first find which packages provide a vpkg,
+ then find a matching full spec. e.g., in this scenario:
+
+ { 'mpi' :
+ { mpi@:1.1 : set([mpich]),
+ mpi@:2.3 : set([mpich2@1.9:]) } }
+
+ Calling providers_for(spec) will find specs that provide a
+ matching implementation of MPI.
+
+ """
+
+ def __init__(self, specs=None, restrict=False):
+ """Create a new ProviderIndex.
+
+ Optional arguments:
+
+ specs
+ List (or sequence) of specs. If provided, will call
+ `update` on this ProviderIndex with each spec in the list.
+
+ restrict
+ "restricts" values to the verbatim input specs; do not
+ pre-apply package's constraints.
+
+ TODO: rename this. It is intended to keep things as broad
+ as possible without overly restricting results, so it is
+ not the best name.
+ """
+ if specs is None:
+ specs = []
+
+ self.restrict = restrict
+ self.providers = {}
+
+ for spec in specs:
+ if not isinstance(spec, spack.spec.Spec):
+ spec = spack.spec.Spec(spec)
+
+ if spec.virtual:
+ continue
+
+ self.update(spec)
+
+ def update(self, spec):
+ if not isinstance(spec, spack.spec.Spec):
+ spec = spack.spec.Spec(spec)
+
+ if not spec.name:
+ # Empty specs do not have a package
+ return
+
+ assert(not spec.virtual)
+
+ pkg = spec.package
+ for provided_spec, provider_specs in pkg.provided.iteritems():
+ for provider_spec in provider_specs:
+ # Copy the spec's compiler flags onto the provider spec so that
+ # satisfaction below is checked on everything except the flags.
+ provider_spec.compiler_flags = spec.compiler_flags.copy()
+
+ if spec.satisfies(provider_spec, deps=False):
+ provided_name = provided_spec.name
+
+ provider_map = self.providers.setdefault(provided_name, {})
+ if provided_spec not in provider_map:
+ provider_map[provided_spec] = set()
+
+ if self.restrict:
+ provider_set = provider_map[provided_spec]
+
+ # If this package existed in the index before,
+ # need to take the old versions out, as they're
+ # now more constrained.
+ old = set(
+ [s for s in provider_set if s.name == spec.name])
+ provider_set.difference_update(old)
+
+ # Now add the new version.
+ provider_set.add(spec)
+
+ else:
+ # Before putting the spec in the map, constrain
+ # it so that it provides what was asked for.
+ constrained = spec.copy()
+ constrained.constrain(provider_spec)
+ provider_map[provided_spec].add(constrained)
+
+ def providers_for(self, *vpkg_specs):
+ """Gives specs of all packages that provide virtual packages
+ with the supplied specs."""
+ providers = set()
+ for vspec in vpkg_specs:
+ # Allow string names to be passed as input, as well as specs
+ if type(vspec) == str:
+ vspec = spack.spec.Spec(vspec)
+
+ # Add all the providers that satisfy the vpkg spec.
+ if vspec.name in self.providers:
+ for p_spec, spec_set in self.providers[vspec.name].items():
+ if p_spec.satisfies(vspec, deps=False):
+ providers.update(spec_set)
+
+ # Return providers in order
+ return sorted(providers)
+
+ # TODO: this is pretty darned nasty, and inefficient, but there
+ # are not that many vdeps in most specs.
+ def _cross_provider_maps(self, lmap, rmap):
+ result = {}
+ for lspec, rspec in iproduct(lmap, rmap):
+ try:
+ constrained = lspec.constrained(rspec)
+ except spack.spec.UnsatisfiableSpecError:
+ continue
+
+ # lp and rp are left and right provider specs.
+ for lp_spec, rp_spec in iproduct(lmap[lspec], rmap[rspec]):
+ if lp_spec.name == rp_spec.name:
+ try:
+ const = lp_spec.constrained(rp_spec, deps=False)
+ result.setdefault(constrained, set()).add(const)
+ except spack.spec.UnsatisfiableSpecError:
+ continue
+ return result
+
+ def __contains__(self, name):
+ """Whether a particular vpkg name is in the index."""
+ return name in self.providers
+
+ def satisfies(self, other):
+ """Check that providers of virtual specs are compatible."""
+ common = set(self.providers) & set(other.providers)
+ if not common:
+ return True
+
+ # This ensures that some provider in other COULD satisfy the
+ # vpkg constraints on self.
+ result = {}
+ for name in common:
+ crossed = self._cross_provider_maps(self.providers[name],
+ other.providers[name])
+ if crossed:
+ result[name] = crossed
+
+ return all(c in result for c in common)
+
+ def to_yaml(self, stream=None):
+ provider_list = self._transform(
+ lambda vpkg, pset: [
+ vpkg.to_node_dict(), [p.to_node_dict() for p in pset]], list)
+
+ syaml.dump({'provider_index': {'providers': provider_list}},
+ stream=stream)
+
+ @staticmethod
+ def from_yaml(stream):
+ try:
+ yfile = syaml.load(stream)
+ except MarkedYAMLError as e:
+ raise spack.spec.SpackYAMLError(
+ "error parsing YAML ProviderIndex cache:", str(e))
+
+ if not isinstance(yfile, dict):
+ raise ProviderIndexError("YAML ProviderIndex was not a dict.")
+
+ if 'provider_index' not in yfile:
+ raise ProviderIndexError(
+ "YAML ProviderIndex does not start with 'provider_index'")
+
+ index = ProviderIndex()
+ providers = yfile['provider_index']['providers']
+ index.providers = _transform(
+ providers,
+ lambda vpkg, plist: (
+ spack.spec.Spec.from_node_dict(vpkg),
+ set(spack.spec.Spec.from_node_dict(p) for p in plist)))
+ return index
+
+ def merge(self, other):
+ """Merge `other` ProviderIndex into this one."""
+ other = other.copy() # defensive copy.
+
+ for pkg in other.providers:
+ if pkg not in self.providers:
+ self.providers[pkg] = other.providers[pkg]
+ continue
+
+ spdict, opdict = self.providers[pkg], other.providers[pkg]
+ for provided_spec in opdict:
+ if provided_spec not in spdict:
+ spdict[provided_spec] = opdict[provided_spec]
+ continue
+
+ spdict[provided_spec] = \
+ spdict[provided_spec].union(opdict[provided_spec])
+
+ def remove_provider(self, pkg_name):
+ """Remove a provider from the ProviderIndex."""
+ empty_pkg_dict = []
+ for pkg, pkg_dict in self.providers.items():
+ empty_pset = []
+ for provided, pset in pkg_dict.items():
+ same_name = set(p for p in pset if p.fullname == pkg_name)
+ pset.difference_update(same_name)
+
+ if not pset:
+ empty_pset.append(provided)
+
+ for provided in empty_pset:
+ del pkg_dict[provided]
+
+ if not pkg_dict:
+ empty_pkg_dict.append(pkg)
+
+ for pkg in empty_pkg_dict:
+ del self.providers[pkg]
+
+ def copy(self):
+ """Deep copy of this ProviderIndex."""
+ clone = ProviderIndex()
+ clone.providers = self._transform(
+ lambda vpkg, pset: (vpkg, set((p.copy() for p in pset))))
+ return clone
+
+ def __eq__(self, other):
+ return self.providers == other.providers
+
+ def _transform(self, transform_fun, out_mapping_type=dict):
+ return _transform(self.providers, transform_fun, out_mapping_type)
+
+ def __str__(self):
+ return pformat(
+ _transform(self.providers,
+ lambda k, v: (k, list(v))))
+
+
+def _transform(providers, transform_fun, out_mapping_type=dict):
+ """Syntactic sugar for transforming a providers dict.
+
+ transform_fun takes a (vpkg, pset) pair and is applied to each
+ pair in the nested dicts.
+
+ """
+ def mapiter(mappings):
+ if isinstance(mappings, dict):
+ return mappings.iteritems()
+ else:
+ return iter(mappings)
+
+ return dict(
+ (name, out_mapping_type([
+ transform_fun(vpkg, pset) for vpkg, pset in mapiter(mappings)]))
+ for name, mappings in providers.items())
+
+
+class ProviderIndexError(spack.error.SpackError):
+ """Raised when there is a problem with a ProviderIndex."""
diff --git a/lib/spack/spack/repository.py b/lib/spack/spack/repository.py
index 70134964ad..1536ecb0e6 100644
--- a/lib/spack/spack/repository.py
+++ b/lib/spack/spack/repository.py
@@ -23,6 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import stat
+import shutil
+import errno
import exceptions
import sys
import inspect
@@ -30,15 +33,18 @@ import imp
import re
import traceback
from bisect import bisect_left
-from external import yaml
+from types import ModuleType
+
+import yaml
import llnl.util.tty as tty
from llnl.util.filesystem import *
+import spack
import spack.error
-import spack.config
import spack.spec
-from spack.virtual import ProviderIndex
+from spack.provider_index import ProviderIndex
+from spack.util.path import canonicalize_path
from spack.util.naming import *
#
@@ -51,6 +57,7 @@ repo_namespace = 'spack.pkg'
# These names describe how repos should be laid out in the filesystem.
#
repo_config_name = 'repo.yaml' # Top-level filename for repo config.
+repo_index_name = 'index.yaml' # Top-level filename for repository index.
packages_dir_name = 'packages' # Top-level repo directory containing pkgs.
package_file_name = 'package.py' # Filename for packages in a repository.
@@ -61,6 +68,7 @@ NOT_PROVIDED = object()
def _autospec(function):
"""Decorator that automatically converts the argument of a single-arg
function to a Spec."""
+
def converter(self, spec_like, *args, **kwargs):
if not isinstance(spec_like, spack.spec.Spec):
spec_like = spack.spec.Spec(spec_like)
@@ -68,25 +76,22 @@ def _autospec(function):
return converter
-def _make_namespace_module(ns):
- module = imp.new_module(ns)
- module.__file__ = "(spack namespace)"
- module.__path__ = []
- module.__package__ = ns
- return module
-
-
-def substitute_spack_prefix(path):
- """Replaces instances of $spack with Spack's prefix."""
- return re.sub(r'^\$spack', spack.prefix, path)
+class SpackNamespace(ModuleType):
+ """ Allow lazy loading of modules."""
+ def __init__(self, namespace):
+ super(SpackNamespace, self).__init__(namespace)
+ self.__file__ = "(spack namespace)"
+ self.__path__ = []
+ self.__name__ = namespace
+ self.__package__ = namespace
+ self.__modules = {}
-def canonicalize_path(path):
- """Substitute $spack, expand user home, take abspath."""
- path = substitute_spack_prefix(path)
- path = os.path.expanduser(path)
- path = os.path.abspath(path)
- return path
+ def __getattr__(self, name):
+ """Getattr lazily loads modules if they're not already loaded."""
+ submodule = self.__package__ + '.' + name
+ setattr(self, name, __import__(submodule))
+ return getattr(self, name)
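SpackNamespace replaces the old _make_namespace_module helper with a ModuleType subclass that imports sub-modules only when they are first touched. The fragment below is a generic sketch of the same lazy pattern, independent of Spack; it uses importlib.import_module instead of raw __import__ purely for clarity:

    import importlib
    from types import ModuleType

    class LazyNamespace(ModuleType):
        """Load submodules of an existing package only on first access."""
        def __getattr__(self, name):
            # Only reached when 'name' is not yet an attribute: import the
            # submodule once and cache it on the namespace object.
            module = importlib.import_module(self.__name__ + '.' + name)
            setattr(self, name, module)
            return module

    # Wrap the stdlib 'logging' package; 'handlers' is imported lazily.
    lazy_logging = LazyNamespace('logging')
    print lazy_logging.handlers.RotatingFileHandler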
class RepoPath(object):
@@ -96,6 +101,7 @@ class RepoPath(object):
combined results of the Repos in its list instead of on a
single package repository.
"""
+
def __init__(self, *repo_dirs, **kwargs):
# super-namespace for all packages in the RepoPath
self.super_namespace = kwargs.get('namespace', repo_namespace)
@@ -104,11 +110,12 @@ class RepoPath(object):
self.by_namespace = NamespaceTrie()
self.by_path = {}
- self._all_package_names = []
+ self._all_package_names = None
self._provider_index = None
# If repo_dirs is empty, just use the configuration
if not repo_dirs:
+ import spack.config
repo_dirs = spack.config.get_config('repos')
if not repo_dirs:
raise NoRepoConfiguredError(
@@ -125,9 +132,8 @@ class RepoPath(object):
"To remove the bad repository, run this command:",
" spack repo rm %s" % root)
-
def swap(self, other):
- """Convenience function to make swapping repostiories easier.
+ """Convenience function to make swapping repositories easier.
This is currently used by mock tests.
TODO: Maybe there is a cleaner way.
@@ -143,7 +149,6 @@ class RepoPath(object):
setattr(self, attr, getattr(other, attr))
setattr(other, attr, tmp)
-
def _add(self, repo):
"""Add a repository to the namespace and path indexes.
@@ -157,47 +162,43 @@ class RepoPath(object):
if repo.namespace in self.by_namespace:
raise DuplicateRepoError(
"Package repos '%s' and '%s' both provide namespace %s"
- % (repo.root, self.by_namespace[repo.namespace].root, repo.namespace))
+ % (repo.root, self.by_namespace[repo.namespace].root,
+ repo.namespace))
# Add repo to the pkg indexes
self.by_namespace[repo.full_namespace] = repo
self.by_path[repo.root] = repo
- # add names to the cached name list
- new_pkgs = set(repo.all_package_names())
- new_pkgs.update(set(self._all_package_names))
- self._all_package_names = sorted(new_pkgs, key=lambda n:n.lower())
-
-
def put_first(self, repo):
"""Add repo first in the search path."""
self._add(repo)
self.repos.insert(0, repo)
-
def put_last(self, repo):
"""Add repo last in the search path."""
self._add(repo)
self.repos.append(repo)
-
def remove(self, repo):
"""Remove a repo from the search path."""
if repo in self.repos:
self.repos.remove(repo)
-
def get_repo(self, namespace, default=NOT_PROVIDED):
"""Get a repository by namespace.
- Arguments
- namespace
- Look up this namespace in the RepoPath, and return
- it if found.
-
- Optional Arguments
- default
- If default is provided, return it when the namespace
- isn't found. If not, raise an UnknownNamespaceError.
+
+ Arguments:
+
+ namespace:
+
+ Look up this namespace in the RepoPath, and return it if found.
+
+ Optional Arguments:
+
+ default:
+
+ If default is provided, return it when the namespace
+ isn't found. If not, raise an UnknownNamespaceError.
"""
fullspace = '%s.%s' % (self.super_namespace, namespace)
if fullspace not in self.by_namespace:
@@ -206,38 +207,45 @@ class RepoPath(object):
return default
return self.by_namespace[fullspace]
-
def first_repo(self):
"""Get the first repo in precedence order."""
return self.repos[0] if self.repos else None
-
def all_package_names(self):
"""Return all unique package names in all repositories."""
+ if self._all_package_names is None:
+ all_pkgs = set()
+ for repo in self.repos:
+ for name in repo.all_package_names():
+ all_pkgs.add(name)
+ self._all_package_names = sorted(all_pkgs, key=lambda n: n.lower())
return self._all_package_names
-
def all_packages(self):
for name in self.all_package_names():
yield self.get(name)
+ @property
+ def provider_index(self):
+ """Merged ProviderIndex from all Repos in the RepoPath."""
+ if self._provider_index is None:
+ self._provider_index = ProviderIndex()
+ for repo in reversed(self.repos):
+ self._provider_index.merge(repo.provider_index)
+
+ return self._provider_index
@_autospec
def providers_for(self, vpkg_spec):
- if self._provider_index is None:
- self._provider_index = ProviderIndex(self.all_package_names())
-
- providers = self._provider_index.providers_for(vpkg_spec)
+ providers = self.provider_index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError(vpkg_spec.name)
return providers
-
@_autospec
def extensions_for(self, extendee_spec):
return [p for p in self.all_packages() if p.extends(extendee_spec)]
-
def find_module(self, fullname, path=None):
"""Implements precedence for overlaid namespaces.
@@ -264,7 +272,6 @@ class RepoPath(object):
return None
-
def load_module(self, fullname):
"""Handles loading container namespaces when necessary.
@@ -273,18 +280,14 @@ class RepoPath(object):
if fullname in sys.modules:
return sys.modules[fullname]
- # partition fullname into prefix and module name.
- namespace, dot, module_name = fullname.rpartition('.')
-
if not self.by_namespace.is_prefix(fullname):
raise ImportError("No such Spack repo: %s" % fullname)
- module = _make_namespace_module(namespace)
+ module = SpackNamespace(fullname)
module.__loader__ = self
sys.modules[fullname] = module
return module
-
@_autospec
def repo_for_pkg(self, spec):
"""Given a spec, get the repository for its package."""
@@ -306,7 +309,6 @@ class RepoPath(object):
# that can operate on packages that don't exist yet.
return self.first_repo()
-
@_autospec
def get(self, spec, new=False):
"""Find a repo that contains the supplied spec's package.
@@ -315,12 +317,10 @@ class RepoPath(object):
"""
return self.repo_for_pkg(spec).get(spec)
-
def get_pkg_class(self, pkg_name):
"""Find a class for the spec's package and return the class object."""
return self.repo_for_pkg(pkg_name).get_pkg_class(pkg_name)
-
@_autospec
def dump_provenance(self, spec, path):
"""Dump provenance information for a spec to a particular path.
@@ -330,24 +330,27 @@ class RepoPath(object):
"""
return self.repo_for_pkg(spec).dump_provenance(spec, path)
-
def dirname_for_package_name(self, pkg_name):
return self.repo_for_pkg(pkg_name).dirname_for_package_name(pkg_name)
-
def filename_for_package_name(self, pkg_name):
return self.repo_for_pkg(pkg_name).filename_for_package_name(pkg_name)
-
def exists(self, pkg_name):
+ """Whether package with the give name exists in the path's repos.
+
+ Note that virtual packages do not "exist".
+ """
return any(repo.exists(pkg_name) for repo in self.repos)
+ def is_virtual(self, pkg_name):
+ """True if the package with this name is virtual, False otherwise."""
+ return pkg_name in self.provider_index
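With the merged provider_index property above, a RepoPath can answer virtual-package queries without instantiating any package classes. A hedged sketch of how a caller might use it, assuming spack.repo is the RepoPath that Spack builds from the configured repos and that the builtin repo provides 'mpi':

    # Illustrative only; requires an initialized Spack.
    import spack

    repo_path = spack.repo                   # a RepoPath instance
    assert repo_path.is_virtual('mpi')       # backed by the merged index
    assert not repo_path.is_virtual('zlib')  # a real package, not a vpkg

    for provider in repo_path.providers_for('mpi'):
        print provider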
def __contains__(self, pkg_name):
return self.exists(pkg_name)
-
class Repo(object):
"""Class representing a package repository in the filesystem.
@@ -360,6 +363,7 @@ class Repo(object):
A Python namespace where the repository's packages should live.
"""
+
def __init__(self, root, namespace=repo_namespace):
"""Instantiate a package repository from a filesystem path.
@@ -381,12 +385,14 @@ class Repo(object):
# check and raise BadRepoError on fail.
def check(condition, msg):
- if not condition: raise BadRepoError(msg)
+ if not condition:
+ raise BadRepoError(msg)
# Validate repository layout.
- self.config_file = join_path(self.root, repo_config_name)
+ self.config_file = join_path(self.root, repo_config_name)
check(os.path.isfile(self.config_file),
"No %s found in '%s'" % (repo_config_name, root))
+
self.packages_path = join_path(self.root, packages_dir_name)
check(os.path.isdir(self.packages_path),
"No directory '%s' found in '%s'" % (repo_config_name, root))
@@ -398,12 +404,14 @@ class Repo(object):
self.namespace = config['namespace']
check(re.match(r'[a-zA-Z][a-zA-Z0-9_.]+', self.namespace),
- ("Invalid namespace '%s' in repo '%s'. " % (self.namespace, self.root)) +
+ ("Invalid namespace '%s' in repo '%s'. "
+ % (self.namespace, self.root)) +
"Namespaces must be valid python identifiers separated by '.'")
# Set up 'full_namespace' to include the super-namespace
if self.super_namespace:
- self.full_namespace = "%s.%s" % (self.super_namespace, self.namespace)
+ self.full_namespace = "%s.%s" % (
+ self.super_namespace, self.namespace)
else:
self.full_namespace = self.namespace
@@ -414,12 +422,21 @@ class Repo(object):
self._modules = {}
self._classes = {}
self._instances = {}
+
+ # list of packages that are newer than the index.
+ self._needs_update = []
+
+ # Index of virtual dependencies
self._provider_index = None
+
+ # Cached list of package names.
self._all_package_names = None
# make sure the namespace for packages in this repo exists.
self._create_namespace()
+ # Unique filename for cache of virtual dependency providers
+ self._cache_file = 'providers/%s-index.yaml' % self.namespace
def _create_namespace(self):
"""Create this repo's namespace module and insert it into sys.modules.
@@ -429,10 +446,11 @@ class Repo(object):
"""
parent = None
- for l in range(1, len(self._names)+1):
+ for l in range(1, len(self._names) + 1):
ns = '.'.join(self._names[:l])
- if not ns in sys.modules:
- module = _make_namespace_module(ns)
+
+ if ns not in sys.modules:
+ module = SpackNamespace(ns)
module.__loader__ = self
sys.modules[ns] = module
@@ -442,14 +460,14 @@ class Repo(object):
# This ensures that we can do things like:
# import spack.pkg.builtin.mpich as mpich
if parent:
- modname = self._names[l-1]
- if not hasattr(parent, modname):
- setattr(parent, modname, module)
+ modname = self._names[l - 1]
+ setattr(parent, modname, module)
else:
- # no need to set up a module, but keep track of the parent.
+ # no need to set up a module
module = sys.modules[ns]
- parent = module
+ # but keep track of the parent in this loop
+ parent = module
def real_name(self, import_name):
"""Allow users to import Spack packages using Python identifiers.
@@ -476,13 +494,11 @@ class Repo(object):
return name
return None
-
def is_prefix(self, fullname):
"""True if fullname is a prefix of this Repo's namespace."""
parts = fullname.split('.')
return self._names[:len(parts)] == parts
-
def find_module(self, fullname, path=None):
"""Python find_module import hook.
@@ -498,7 +514,6 @@ class Repo(object):
return None
-
def load_module(self, fullname):
"""Python importer load hook.
@@ -510,7 +525,7 @@ class Repo(object):
namespace, dot, module_name = fullname.rpartition('.')
if self.is_prefix(fullname):
- module = _make_namespace_module(fullname)
+ module = SpackNamespace(fullname)
elif namespace == self.full_namespace:
real_name = self.real_name(module_name)
@@ -523,8 +538,12 @@ class Repo(object):
module.__loader__ = self
sys.modules[fullname] = module
- return module
+ if namespace != fullname:
+ parent = sys.modules[namespace]
+ if not hasattr(parent, module_name):
+ setattr(parent, module_name, module)
+ return module
def _read_config(self):
"""Check for a YAML config file in this db's root directory."""
@@ -533,40 +552,39 @@ class Repo(object):
yaml_data = yaml.load(reponame_file)
if (not yaml_data or 'repo' not in yaml_data or
- not isinstance(yaml_data['repo'], dict)):
- tty.die("Invalid %s in repository %s"
- % (repo_config_name, self.root))
+ not isinstance(yaml_data['repo'], dict)):
+ tty.die("Invalid %s in repository %s" % (
+ repo_config_name, self.root))
return yaml_data['repo']
- except exceptions.IOError, e:
+ except exceptions.IOError:
tty.die("Error reading %s when opening %s"
% (self.config_file, self.root))
-
@_autospec
def get(self, spec, new=False):
if spec.virtual:
raise UnknownPackageError(spec.name)
if spec.namespace and spec.namespace != self.namespace:
- raise UnknownPackageError("Repository %s does not contain package %s"
- % (self.namespace, spec.fullname))
+ raise UnknownPackageError(
+ "Repository %s does not contain package %s"
+ % (self.namespace, spec.fullname))
key = hash(spec)
if new or key not in self._instances:
package_class = self.get_pkg_class(spec.name)
try:
- copy = spec.copy() # defensive copy. Package owns its spec.
+ copy = spec.copy() # defensive copy. Package owns its spec.
self._instances[key] = package_class(copy)
- except Exception, e:
+ except Exception:
if spack.debug:
sys.excepthook(*sys.exc_info())
raise FailedConstructorError(spec.fullname, *sys.exc_info())
return self._instances[key]
-
@_autospec
def dump_provenance(self, spec, path):
"""Dump provenance information for a spec to a particular path.
@@ -579,8 +597,9 @@ class Repo(object):
raise UnknownPackageError(spec.name)
if spec.namespace and spec.namespace != self.namespace:
- raise UnknownPackageError("Repository %s does not contain package %s."
- % (self.namespace, spec.fullname))
+ raise UnknownPackageError(
+ "Repository %s does not contain package %s."
+ % (self.namespace, spec.fullname))
# Install any patch files needed by packages.
mkdirp(path)
@@ -595,34 +614,61 @@ class Repo(object):
# Install the package.py file itself.
install(self.filename_for_package_name(spec), path)
-
def purge(self):
"""Clear entire package instance cache."""
self._instances.clear()
+ def _update_provider_index(self):
+ # Check modification dates of all packages
+ self._fast_package_check()
- @_autospec
- def providers_for(self, vpkg_spec):
+ def read():
+ with open(self.index_file) as f:
+ self._provider_index = ProviderIndex.from_yaml(f)
+
+ # Read the old ProviderIndex, or make a new one.
+ key = self._cache_file
+ index_existed = spack.misc_cache.init_entry(key)
+ if index_existed and not self._needs_update:
+ with spack.misc_cache.read_transaction(key) as f:
+ self._provider_index = ProviderIndex.from_yaml(f)
+ else:
+ with spack.misc_cache.write_transaction(key) as (old, new):
+ if old:
+ self._provider_index = ProviderIndex.from_yaml(old)
+ else:
+ self._provider_index = ProviderIndex()
+
+ for pkg_name in self._needs_update:
+ namespaced_name = '%s.%s' % (self.namespace, pkg_name)
+ self._provider_index.remove_provider(namespaced_name)
+ self._provider_index.update(namespaced_name)
+
+ self._provider_index.to_yaml(new)
+
+ @property
+ def provider_index(self):
+ """A provider index with names *specific* to this repo."""
if self._provider_index is None:
- self._provider_index = ProviderIndex(self.all_package_names())
+ self._update_provider_index()
+ return self._provider_index
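The _update_provider_index method above is an instance of a read-or-rebuild cache pattern over spack.misc_cache: compare mtimes, read the entry inside a read transaction if it is current, otherwise regenerate it inside a write transaction. A stripped-down sketch of that pattern with a made-up entry name (spack.misc_cache and its transaction API are assumed to behave exactly as used above):

    # Hypothetical cache entry name; illustrative only.
    import spack

    key = 'providers/example-index.yaml'
    existed = spack.misc_cache.init_entry(key)

    if existed:
        with spack.misc_cache.read_transaction(key) as f:
            cached_text = f.read()
    else:
        with spack.misc_cache.write_transaction(key) as (old, new):
            # 'old' streams the previous contents (may be empty);
            # whatever is written to 'new' becomes the entry's contents.
            new.write('rebuilt contents\n')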
- providers = self._provider_index.providers_for(vpkg_spec)
+ @_autospec
+ def providers_for(self, vpkg_spec):
+ providers = self.provider_index.providers_for(vpkg_spec)
if not providers:
raise UnknownPackageError(vpkg_spec.name)
return providers
-
@_autospec
def extensions_for(self, extendee_spec):
return [p for p in self.all_packages() if p.extends(extendee_spec)]
-
def _check_namespace(self, spec):
"""Check that the spec's namespace is the same as this repository's."""
if spec.namespace and spec.namespace != self.namespace:
raise UnknownNamespaceError(spec.namespace)
-
@_autospec
def dirname_for_package_name(self, spec):
"""Get the directory name for a particular package. This is the
@@ -630,7 +676,6 @@ class Repo(object):
self._check_namespace(spec)
return join_path(self.packages_path, spec.name)
-
@_autospec
def filename_for_package_name(self, spec):
"""Get the filename for the module we should load for a particular
@@ -645,48 +690,99 @@ class Repo(object):
pkg_dir = self.dirname_for_package_name(spec.name)
return join_path(pkg_dir, package_file_name)
+ def _fast_package_check(self):
+ """List packages in the repo and check whether index is up to date.
- def all_package_names(self):
- """Returns a sorted list of all package names in the Repo."""
+ Both of these operations require checking all `package.py`
+ files, so we do them at the same time. We list the repo
+ directory, look at the package.py files, and compare the
+ index modification date with the most recently modified
+ package file, storing the result.
+
+ The implementation tries to minimize filesystem calls. At
+ the moment, it is O(number of packages) and makes about one
+ stat call per package. This is reasonably fast, and it
+ avoids actually importing packages in Spack, which is slow.
+
+ """
if self._all_package_names is None:
self._all_package_names = []
+ # Get index modification time.
+ index_mtime = spack.misc_cache.mtime(self._cache_file)
+
for pkg_name in os.listdir(self.packages_path):
# Skip non-directories in the package root.
pkg_dir = join_path(self.packages_path, pkg_name)
- if not os.path.isdir(pkg_dir):
- continue
-
- # Skip directories without a package.py in them.
- pkg_file = join_path(self.packages_path, pkg_name, package_file_name)
- if not os.path.isfile(pkg_file):
- continue
# Warn about invalid names that look like packages.
if not valid_module_name(pkg_name):
- tty.warn("Skipping package at %s. '%s' is not a valid Spack module name."
- % (pkg_dir, pkg_name))
+ msg = ("Skipping package at %s. "
+ "'%s' is not a valid Spack module name.")
+ tty.warn(msg % (pkg_dir, pkg_name))
+ continue
+
+ # construct the file name from the directory
+ pkg_file = join_path(
+ self.packages_path, pkg_name, package_file_name)
+
+ # Use stat here to avoid lots of calls to the filesystem.
+ try:
+ sinfo = os.stat(pkg_file)
+ except OSError as e:
+ if e.errno == errno.ENOENT:
+ # No package.py file here.
+ continue
+ elif e.errno == errno.EACCES:
+ tty.warn("Can't read package file %s." % pkg_file)
+ continue
+ raise e
+
+ # if it's not a file, skip it.
+ if stat.S_ISDIR(sinfo.st_mode):
continue
# All checks passed. Add it to the list.
self._all_package_names.append(pkg_name)
+
+ # record the package if it is newer than the index.
+ if sinfo.st_mtime > index_mtime:
+ self._needs_update.append(pkg_name)
+
self._all_package_names.sort()
return self._all_package_names
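The docstring above promises roughly one stat call per package; the core of the freshness check is just os.stat plus an mtime comparison. A generic, stand-alone version of the same pattern (paths and names are hypothetical, not Spack code):

    import errno
    import os
    import stat

    def newer_than_index(packages_path, index_mtime):
        """Names of package dirs whose package.py is newer than the index."""
        stale = []
        for name in os.listdir(packages_path):
            pkg_file = os.path.join(packages_path, name, 'package.py')
            try:
                sinfo = os.stat(pkg_file)
            except OSError as e:
                if e.errno in (errno.ENOENT, errno.EACCES):
                    continue              # no package.py here, or unreadable
                raise
            if stat.S_ISDIR(sinfo.st_mode):
                continue                  # not a regular file
            if sinfo.st_mtime > index_mtime:
                stale.append(name)
        return sorted(stale)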
+ def all_package_names(self):
+ """Returns a sorted list of all package names in the Repo."""
+ self._fast_package_check()
+ return self._all_package_names
def all_packages(self):
+ """Iterator over all packages in the repository.
+
+ Use this with care, because loading packages is slow.
+
+ """
for name in self.all_package_names():
yield self.get(name)
-
def exists(self, pkg_name):
"""Whether a package with the supplied name exists."""
- # This does a binary search in the sorted list.
- idx = bisect_left(self.all_package_names(), pkg_name)
- return (idx < len(self._all_package_names) and
- self._all_package_names[idx] == pkg_name)
+ if self._all_package_names:
+ # This does a binary search in the sorted list.
+ idx = bisect_left(self.all_package_names(), pkg_name)
+ return (idx < len(self._all_package_names) and
+ self._all_package_names[idx] == pkg_name)
+ # If we haven't generated the full package list, don't.
+ # Just check whether the file exists.
+ filename = self.filename_for_package_name(pkg_name)
+ return os.path.exists(filename)
+
+ def is_virtual(self, pkg_name):
+ """True if the package with this name is virtual, False otherwise."""
+ return pkg_name in self.provider_index
def _get_pkg_module(self, pkg_name):
"""Create a module for a particular package.
@@ -719,7 +815,6 @@ class Repo(object):
return self._modules[pkg_name]
-
def get_pkg_class(self, pkg_name):
"""Get the class for the package out of its module.
@@ -727,6 +822,11 @@ class Repo(object):
package. Then extracts the package class from the module
according to Spack's naming convention.
"""
+ namespace, _, pkg_name = pkg_name.rpartition('.')
+ if namespace and (namespace != self.namespace):
+ raise InvalidNamespaceError('Invalid namespace for %s repo: %s'
+ % (self.namespace, namespace))
+
class_name = mod_to_class(pkg_name)
module = self._get_pkg_module(pkg_name)
@@ -736,15 +836,12 @@ class Repo(object):
return cls
-
def __str__(self):
return "[Repo '%s' at '%s']" % (self.namespace, self.root)
-
def __repr__(self):
return self.__str__()
-
def __contains__(self, pkg_name):
return self.exists(pkg_name)
@@ -753,30 +850,37 @@ def create_repo(root, namespace=None):
"""Create a new repository in root with the specified namespace.
If the namespace is not provided, use basename of root.
- Return the canonicalized path and the namespace of the created repository.
+ Return the canonicalized path and namespace of the created repository.
"""
root = canonicalize_path(root)
if not namespace:
namespace = os.path.basename(root)
if not re.match(r'\w[\.\w-]*', namespace):
- raise InvalidNamespaceError("'%s' is not a valid namespace." % namespace)
+ raise InvalidNamespaceError(
+ "'%s' is not a valid namespace." % namespace)
existed = False
if os.path.exists(root):
if os.path.isfile(root):
- raise BadRepoError('File %s already exists and is not a directory' % root)
+ raise BadRepoError('File %s already exists and is not a directory'
+ % root)
elif os.path.isdir(root):
if not os.access(root, os.R_OK | os.W_OK):
- raise BadRepoError('Cannot create new repo in %s: cannot access directory.' % root)
+ raise BadRepoError(
+ 'Cannot create new repo in %s: cannot access directory.'
+ % root)
if os.listdir(root):
- raise BadRepoError('Cannot create new repo in %s: directory is not empty.' % root)
+ raise BadRepoError(
+ 'Cannot create new repo in %s: directory is not empty.'
+ % root)
existed = True
full_path = os.path.realpath(root)
parent = os.path.dirname(full_path)
if not os.access(parent, os.R_OK | os.W_OK):
- raise BadRepoError("Cannot create repository in %s: can't access parent!" % root)
+ raise BadRepoError(
+ "Cannot create repository in %s: can't access parent!" % root)
try:
config_path = os.path.join(root, repo_config_name)
@@ -827,6 +931,7 @@ class PackageLoadError(spack.error.SpackError):
class UnknownPackageError(PackageLoadError):
"""Raised when we encounter a package spack doesn't have."""
+
def __init__(self, name, repo=None):
msg = None
if repo:
@@ -839,6 +944,7 @@ class UnknownPackageError(PackageLoadError):
class UnknownNamespaceError(PackageLoadError):
"""Raised when we encounter an unknown namespace"""
+
def __init__(self, namespace):
super(UnknownNamespaceError, self).__init__(
"Unknown namespace: %s" % namespace)
@@ -846,6 +952,7 @@ class UnknownNamespaceError(PackageLoadError):
class FailedConstructorError(PackageLoadError):
"""Raised when a package's class constructor fails."""
+
def __init__(self, name, exc_type, exc_obj, exc_tb):
super(FailedConstructorError, self).__init__(
"Class constructor failed for package '%s'." % name,
diff --git a/lib/spack/spack/resource.py b/lib/spack/spack/resource.py
index 24b675f8da..1d4d448298 100644
--- a/lib/spack/spack/resource.py
+++ b/lib/spack/spack/resource.py
@@ -31,9 +31,11 @@ package to enable optional features.
class Resource(object):
+ """Represents an optional resource to be fetched by a package.
+
+ Aggregates a name, a fetcher, a destination and a placement.
"""
- Represents an optional resource. Aggregates a name, a fetcher, a destination and a placement
- """
+
def __init__(self, name, fetcher, destination, placement):
self.name = name
self.fetcher = fetcher
diff --git a/lib/spack/spack/schema/__init__.py b/lib/spack/spack/schema/__init__.py
new file mode 100644
index 0000000000..de45ea921f
--- /dev/null
+++ b/lib/spack/spack/schema/__init__.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""This module contains jsonschema files for all of Spack's YAML formats.
+"""
+from llnl.util.lang import list_modules
+
+# Automatically bring in all sub-modules
+__all__ = []
+for mod in list_modules(__path__[0]):
+ __import__('%s.%s' % (__name__, mod))
+ __all__.append(mod)
diff --git a/lib/spack/spack/schema/compilers.py b/lib/spack/spack/schema/compilers.py
new file mode 100644
index 0000000000..282eddf91b
--- /dev/null
+++ b/lib/spack/spack/schema/compilers.py
@@ -0,0 +1,108 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for compilers.yaml configuration file.
+
+.. literalinclude:: ../spack/schema/compilers.py
+ :lines: 32-
+"""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack compiler configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ 'compilers': {
+ 'type': 'array',
+ 'items': {
+ 'compiler': {
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'required': [
+ 'paths', 'spec', 'modules', 'operating_system'],
+ 'properties': {
+ 'paths': {
+ 'type': 'object',
+ 'required': ['cc', 'cxx', 'f77', 'fc'],
+ 'additionalProperties': False,
+ 'properties': {
+ 'cc': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cxx': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'f77': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'fc': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]}}},
+ 'flags': {
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'properties': {
+ 'cflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cxxflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'fflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'cppflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'ldflags': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'ldlibs': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]}}},
+ 'spec': {'type': 'string'},
+ 'operating_system': {'type': 'string'},
+ 'alias': {'anyOf': [{'type': 'string'},
+ {'type': 'null'}]},
+ 'modules': {'anyOf': [{'type': 'string'},
+ {'type': 'null'},
+ {'type': 'array'}]},
+ 'environment': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'set': {
+ 'type': 'object',
+ 'patternProperties': {
+ r'\w[\w-]*': { # variable name
+ 'type': 'string'
+ }
+ }
+ }
+ }
+ },
+ 'extra_rpaths': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {'type': 'string'}
+ }
+ },
+ },
+ },
+ },
+ },
+}
diff --git a/lib/spack/spack/schema/config.py b/lib/spack/spack/schema/config.py
new file mode 100644
index 0000000000..e51fa69afe
--- /dev/null
+++ b/lib/spack/spack/schema/config.py
@@ -0,0 +1,67 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for config.yaml configuration file.
+
+.. literalinclude:: ../spack/schema/config.py
+ :lines: 32-
+"""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack module file configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ 'config': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'install_tree': {'type': 'string'},
+ 'build_stage': {
+ 'oneOf': [
+ {'type': 'string'},
+ {'type': 'array',
+ 'items': {'type': 'string'}}],
+ },
+ 'module_roots': {
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'properties': {
+ 'tcl': {'type': 'string'},
+ 'lmod': {'type': 'string'},
+ 'dotkit': {'type': 'string'},
+ },
+ },
+ 'source_cache': {'type': 'string'},
+ 'misc_cache': {'type': 'string'},
+ 'verify_ssl': {'type': 'boolean'},
+ 'checksum': {'type': 'boolean'},
+ 'dirty': {'type': 'boolean'},
+ }
+ },
+ },
+}
diff --git a/lib/spack/spack/schema/mirrors.py b/lib/spack/spack/schema/mirrors.py
new file mode 100644
index 0000000000..60b865bb42
--- /dev/null
+++ b/lib/spack/spack/schema/mirrors.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for mirrors.yaml configuration file.
+
+.. literalinclude:: ../spack/schema/mirrors.py
+ :lines: 32-
+"""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack mirror configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'mirrors': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': {
+ 'type': 'string'},
+ },
+ },
+ },
+}
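Each of these schema modules is a plain jsonschema document; Spack's config code validates the corresponding YAML section against it. A standalone validation sketch using the jsonschema package (the mirror name and URL are examples only, and lib/spack is assumed to be importable):

    import jsonschema
    import spack.schema.mirrors

    good = {'mirrors': {'local_filesystem': 'file:///var/spack/mirror'}}
    jsonschema.validate(good, spack.schema.mirrors.schema)   # passes

    bad = {'mirrors': {'local_filesystem': 42}}              # not a string
    try:
        jsonschema.validate(bad, spack.schema.mirrors.schema)
    except jsonschema.ValidationError as e:
        print e.message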
diff --git a/lib/spack/spack/schema/modules.py b/lib/spack/spack/schema/modules.py
new file mode 100644
index 0000000000..2059e14fa6
--- /dev/null
+++ b/lib/spack/spack/schema/modules.py
@@ -0,0 +1,175 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for modules.yaml configuration file.
+
+.. literalinclude:: ../spack/schema/modules.py
+ :lines: 32-
+"""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack module file configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'definitions': {
+ 'array_of_strings': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'
+ }
+ },
+ 'dictionary_of_strings': {
+ 'type': 'object',
+ 'patternProperties': {
+ r'\w[\w-]*': { # key
+ 'type': 'string'
+ }
+ }
+ },
+ 'dependency_selection': {
+ 'type': 'string',
+ 'enum': ['none', 'direct', 'all']
+ },
+ 'module_file_configuration': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'filter': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'environment_blacklist': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'
+ }
+ }
+ }
+ },
+ 'autoload': {
+ '$ref': '#/definitions/dependency_selection'},
+ 'prerequisites': {
+ '$ref': '#/definitions/dependency_selection'},
+ 'conflict': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'load': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'suffixes': {
+ '$ref': '#/definitions/dictionary_of_strings'},
+ 'environment': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'set': {
+ '$ref': '#/definitions/dictionary_of_strings'},
+ 'unset': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'prepend_path': {
+ '$ref': '#/definitions/dictionary_of_strings'},
+ 'append_path': {
+ '$ref': '#/definitions/dictionary_of_strings'}
+ }
+ }
+ }
+ },
+ 'module_type_configuration': {
+ 'type': 'object',
+ 'default': {},
+ 'anyOf': [
+ {'properties': {
+ 'hash_length': {
+ 'type': 'integer',
+ 'minimum': 0,
+ 'default': 7
+ },
+ 'whitelist': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'blacklist': {
+ '$ref': '#/definitions/array_of_strings'},
+ 'naming_scheme': {
+ 'type': 'string' # Can we be more specific here?
+ }
+ }},
+ {'patternProperties': {
+ r'\w[\w-]*': {
+ '$ref': '#/definitions/module_file_configuration'
+ }
+ }}
+ ]
+ }
+ },
+ 'patternProperties': {
+ r'modules': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'prefix_inspections': {
+ 'type': 'object',
+ 'patternProperties': {
+ # prefix-relative path to be inspected for existence
+ r'\w[\w-]*': {
+ '$ref': '#/definitions/array_of_strings'}}},
+ 'enable': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string',
+ 'enum': ['tcl', 'dotkit', 'lmod']}},
+ 'lmod': {
+ 'allOf': [
+ # Base configuration
+ {'$ref': '#/definitions/module_type_configuration'},
+ {
+ 'core_compilers': {
+ '$ref': '#/definitions/array_of_strings'
+ },
+ 'hierarchical_scheme': {
+ '$ref': '#/definitions/array_of_strings'
+ }
+ } # Specific lmod extensions
+ ]},
+ 'tcl': {
+ 'allOf': [
+ # Base configuration
+ {'$ref': '#/definitions/module_type_configuration'},
+ {} # Specific tcl extensions
+ ]},
+ 'dotkit': {
+ 'allOf': [
+ # Base configuration
+ {'$ref': '#/definitions/module_type_configuration'},
+ {} # Specific dotkit extensions
+ ]},
+ }
+ },
+ },
+}
diff --git a/lib/spack/spack/schema/packages.py b/lib/spack/spack/schema/packages.py
new file mode 100644
index 0000000000..bf5648b1b7
--- /dev/null
+++ b/lib/spack/spack/schema/packages.py
@@ -0,0 +1,90 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for packages.yaml configuration files.
+
+.. literalinclude:: ../spack/schema/packages.py
+ :lines: 32-
+"""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack package configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'packages': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': { # package name
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'properties': {
+ 'version': {
+ 'type': 'array',
+ 'default': [],
+ # version strings
+ 'items': {'anyOf': [{'type': 'string'},
+ {'type': 'number'}]}},
+ 'compiler': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {'type': 'string'}}, # compiler specs
+ 'buildable': {
+ 'type': 'boolean',
+ 'default': True,
+ },
+ 'modules': {
+ 'type': 'object',
+ 'default': {},
+ },
+ 'providers': {
+ 'type': 'object',
+ 'default': {},
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'\w[\w-]*': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {'type': 'string'}, }, }, },
+ 'paths': {
+ 'type': 'object',
+ 'default': {},
+ },
+ 'variants': {
+ 'oneOf': [
+ {'type': 'string'},
+ {'type': 'array',
+ 'items': {'type': 'string'}}],
+ },
+ },
+ },
+ },
+ },
+ },
+}
diff --git a/lib/spack/spack/schema/repos.py b/lib/spack/spack/schema/repos.py
new file mode 100644
index 0000000000..c7a3495ae1
--- /dev/null
+++ b/lib/spack/spack/schema/repos.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Schema for repos.yaml configuration file.
+
+.. literalinclude:: ../spack/schema/repos.py
+ :lines: 32-
+"""
+
+
+schema = {
+ '$schema': 'http://json-schema.org/schema#',
+ 'title': 'Spack repository configuration file schema',
+ 'type': 'object',
+ 'additionalProperties': False,
+ 'patternProperties': {
+ r'repos': {
+ 'type': 'array',
+ 'default': [],
+ 'items': {
+ 'type': 'string'},
+ },
+ },
+}
diff --git a/lib/spack/spack/spec.py b/lib/spack/spack/spec.py
index 3f1fc115b8..e34f2b799d 100644
--- a/lib/spack/spack/spec.py
+++ b/lib/spack/spack/spec.py
@@ -18,9 +18,9 @@
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+# You should have received a copy of the GNU Lesser General Public License
+# along with this program; if not, write to the Free Software Foundation,
+# Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
"""
Spack allows very fine-grained control over how packages are installed and
@@ -72,7 +72,9 @@ Here is the EBNF grammar for a spec::
dep_list = { ^ spec }
spec = id [ options ]
options = { @version-list | +variant | -variant | ~variant |
- %compiler | =architecture }
+ %compiler | arch=architecture | [ flag ]=value}
+ flag = { cflags | cxxflags | fcflags | fflags | cppflags |
+ ldflags | ldlibs }
variant = id
architecture = id
compiler = id [ version-list ]
@@ -80,6 +82,9 @@ Here is the EBNF grammar for a spec::
version = id | id: | :id | id:id
id = [A-Za-z0-9_][A-Za-z0-9_.-]*
+Identifiers using the <name>=<value> syntax, such as architectures and
+compiler flags, require a space before the name.
+
There is one context-sensitive part: ids in versions may contain '.', while
other ids may not.
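A couple of illustrative spec strings that exercise the new arch= and flag=value syntax described above (package names, versions and the architecture triple are examples only):

    from spack.spec import Spec

    s = Spec('hdf5@1.8.16 %gcc@4.9.3 +mpi arch=linux-rhel6-x86_64')
    t = Spec('zlib@1.2.8 cppflags=-O3')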
@@ -90,13 +95,12 @@ thing. Spack uses ~variant in directory names and in the canonical form of
specs to avoid ambiguity. Both are provided because ~ can cause shell
expansion when it is the first character in an id typed on the command line.
"""
-import sys
-import itertools
-import hashlib
import base64
+import hashlib
+import ctypes
from StringIO import StringIO
from operator import attrgetter
-import yaml
+
from yaml.error import MarkedYAMLError
import llnl.util.tty as tty
@@ -104,37 +108,74 @@ from llnl.util.lang import *
from llnl.util.tty.color import *
import spack
-import spack.parse
-import spack.error
+import spack.architecture
+import spack.store
import spack.compilers as compilers
-
-from spack.version import *
-from spack.util.string import *
+import spack.error
+import spack.parse
+from spack.build_environment import get_path_from_module, load_module
from spack.util.prefix import Prefix
-from spack.virtual import ProviderIndex
+from spack.util.string import *
+import spack.util.spack_yaml as syaml
+import spack.util.spack_json as sjson
+from spack.util.spack_yaml import syaml_dict
+from spack.util.crypto import prefix_bits
+from spack.version import *
+from spack.provider_index import ProviderIndex
+
+__all__ = [
+ 'Spec',
+ 'alldeps',
+ 'canonical_deptype',
+ 'validate_deptype',
+ 'parse',
+ 'parse_anonymous_spec',
+ 'SpecError',
+ 'SpecParseError',
+ 'DuplicateDependencyError',
+ 'DuplicateVariantError',
+ 'DuplicateCompilerSpecError',
+ 'UnsupportedCompilerError',
+ 'UnknownVariantError',
+ 'DuplicateArchitectureError',
+ 'InconsistentSpecError',
+ 'InvalidDependencyError',
+ 'InvalidDependencyTypeError',
+ 'NoProviderError',
+ 'MultipleProviderError',
+ 'UnsatisfiableSpecError',
+ 'UnsatisfiableSpecNameError',
+ 'UnsatisfiableVersionSpecError',
+ 'UnsatisfiableCompilerSpecError',
+ 'UnsatisfiableVariantSpecError',
+ 'UnsatisfiableCompilerFlagSpecError',
+ 'UnsatisfiableArchitectureSpecError',
+ 'UnsatisfiableProviderSpecError',
+ 'UnsatisfiableDependencySpecError',
+ 'AmbiguousHashError']
# Valid pattern for an identifier in Spack
identifier_re = r'\w[\w-]*'
# Convenient names for color formats so that other things can use them
-compiler_color = '@g'
-version_color = '@c'
-architecture_color = '@m'
-enabled_variant_color = '@B'
+compiler_color = '@g'
+version_color = '@c'
+architecture_color = '@m'
+enabled_variant_color = '@B'
disabled_variant_color = '@r'
-dependency_color = '@.'
-hash_color = '@K'
+dependency_color = '@.'
+hash_color = '@K'
"""This map determines the coloring of specs when using color output.
We make the fields different colors to enhance readability.
See spack.color for descriptions of the color codes. """
-color_formats = {'%' : compiler_color,
- '@' : version_color,
- '=' : architecture_color,
- '+' : enabled_variant_color,
- '~' : disabled_variant_color,
- '^' : dependency_color,
- '#' : hash_color }
+color_formats = {'%': compiler_color,
+ '@': version_color,
+ '=': architecture_color,
+ '+': enabled_variant_color,
+ '~': disabled_variant_color,
+ '^': dependency_color,
+ '#': hash_color}
"""Regex used for splitting by spec field separators."""
_separators = '[%s]' % ''.join(color_formats.keys())
@@ -143,24 +184,53 @@ _separators = '[%s]' % ''.join(color_formats.keys())
every time we call str()"""
_any_version = VersionList([':'])
+# Special types of dependencies.
+alldeps = ('build', 'link', 'run')
+norun = ('link', 'build')
+special_types = {
+ 'alldeps': alldeps,
+ 'all': alldeps, # allow "all" as string but not symbol.
+ 'norun': norun,
+}
-def index_specs(specs):
- """Take a list of specs and return a dict of lists. Dict is
- keyed by spec name and lists include all specs with the
- same name.
- """
- spec_dict = {}
- for spec in specs:
- if not spec.name in spec_dict:
- spec_dict[spec.name] = []
- spec_dict[spec.name].append(spec)
- return spec_dict
+legal_deps = tuple(special_types) + alldeps
+
+"""Max integer helps avoid passing too large a value to cyaml."""
+maxint = 2 ** (ctypes.sizeof(ctypes.c_int) * 8 - 1) - 1
+
+
+def validate_deptype(deptype):
+ if isinstance(deptype, str):
+ if deptype not in legal_deps:
+ raise InvalidDependencyTypeError(
+ "Invalid dependency type: %s" % deptype)
+
+ elif isinstance(deptype, (list, tuple)):
+ for t in deptype:
+ validate_deptype(t)
+
+ elif deptype is None:
+ raise InvalidDependencyTypeError("deptype cannot be None!")
+
+
+def canonical_deptype(deptype):
+ if deptype is None:
+ return alldeps
+
+ elif isinstance(deptype, str):
+ return special_types.get(deptype, (deptype,))
+
+ elif isinstance(deptype, (tuple, list)):
+ return (sum((canonical_deptype(d) for d in deptype), ()))
+
+ return deptype
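
A minimal usage sketch for the two helpers above, assuming the patched module is importable as spack.spec: canonical_deptype() expands None and the special names into explicit tuples, while validate_deptype() rejects anything outside legal_deps.

from spack.spec import canonical_deptype, validate_deptype, alldeps

assert canonical_deptype(None) == alldeps                   # ('build', 'link', 'run')
assert canonical_deptype('norun') == ('link', 'build')
assert canonical_deptype(('build', 'run')) == ('build', 'run')

validate_deptype('link')               # a single legal type: fine
validate_deptype(('build', 'all'))     # 'all' is allowed as a string: fine
# validate_deptype('compile')          # would raise InvalidDependencyTypeError
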
def colorize_spec(spec):
"""Returns a spec colorized according to the colors specified in
color_formats."""
class insert_color:
+
def __init__(self):
self.last = None
@@ -177,10 +247,212 @@ def colorize_spec(spec):
@key_ordering
+class ArchSpec(object):
+ """ The ArchSpec class represents an abstract architecture specification
+ that a package should be built with. At its core, each ArchSpec is
+ comprised of three elements: a platform (e.g. Linux), an OS (e.g.
+ RHEL6), and a target (e.g. x86_64).
+ """
+
+ # TODO: Formalize the specifications for architectures and then use
+ # the appropriate parser here to read these specifications.
+ def __init__(self, *args):
+ to_attr_string = lambda s: str(s) if s and s != "None" else None
+
+ self.platform, self.platform_os, self.target = (None, None, None)
+
+ if len(args) == 1:
+ spec_like = args[0]
+ if isinstance(spec_like, ArchSpec):
+ self._dup(spec_like)
+ elif isinstance(spec_like, basestring):
+ spec_fields = spec_like.split("-")
+
+ if len(spec_fields) == 3:
+ self.platform, self.platform_os, self.target = tuple(
+ to_attr_string(f) for f in spec_fields)
+ else:
+ raise ValueError("%s is an invalid arch spec" % spec_like)
+ elif len(args) == 3:
+ self.platform = to_attr_string(args[0])
+ self.platform_os = to_attr_string(args[1])
+ self.target = to_attr_string(args[2])
+ elif len(args) != 0:
+ raise TypeError("Can't make arch spec from %s" % args)
+
+ def _autospec(self, spec_like):
+ if isinstance(spec_like, ArchSpec):
+ return spec_like
+ return ArchSpec(spec_like)
+
+ def _cmp_key(self):
+ return (self.platform, self.platform_os, self.target)
+
+ def _dup(self, other):
+ self.platform = other.platform
+ self.platform_os = other.platform_os
+ self.target = other.target
+
+ @property
+ def platform(self):
+ return self._platform
+
+ @platform.setter
+ def platform(self, value):
+ """ The platform of the architecture spec will be verified as a
+ supported Spack platform before it's set to ensure all specs
+ refer to valid platforms.
+ """
+ value = str(value) if value is not None else None
+ self._platform = value
+
+ @property
+ def platform_os(self):
+ return self._platform_os
+
+ @platform_os.setter
+ def platform_os(self, value):
+ """ The OS of the architecture spec will update the platform field
+ if the OS is set to one of the reserved OS types so that the
+ default OS type can be resolved. Since the reserved OS
+ information is only available for the host machine, the platform
+            will be assumed to be the host machine's platform.
+ """
+ value = str(value) if value is not None else None
+
+ if value in spack.architecture.Platform.reserved_oss:
+ curr_platform = str(spack.architecture.platform())
+ self.platform = self.platform or curr_platform
+
+ if self.platform != curr_platform:
+ raise ValueError(
+ "Can't set arch spec OS to reserved value '%s' when the "
+ "arch platform (%s) isn't the current platform (%s)" %
+ (value, self.platform, curr_platform))
+
+ spec_platform = spack.architecture.get_platform(self.platform)
+ value = str(spec_platform.operating_system(value))
+
+ self._platform_os = value
+
+ @property
+ def target(self):
+ return self._target
+
+ @target.setter
+ def target(self, value):
+ """ The target of the architecture spec will update the platform field
+ if the target is set to one of the reserved target types so that
+ the default target type can be resolved. Since the reserved target
+ information is only available for the host machine, the platform
+            will be assumed to be the host machine's platform.
+ """
+ value = str(value) if value is not None else None
+
+ if value in spack.architecture.Platform.reserved_targets:
+ curr_platform = str(spack.architecture.platform())
+ self.platform = self.platform or curr_platform
+
+ if self.platform != curr_platform:
+ raise ValueError(
+ "Can't set arch spec target to reserved value '%s' when "
+ "the arch platform (%s) isn't the current platform (%s)" %
+ (value, self.platform, curr_platform))
+
+ spec_platform = spack.architecture.get_platform(self.platform)
+ value = str(spec_platform.target(value))
+
+ self._target = value
+
+ def satisfies(self, other, strict=False):
+ other = self._autospec(other)
+ sdict, odict = self.to_cmp_dict(), other.to_cmp_dict()
+
+ if strict or self.concrete:
+ return all(getattr(self, attr) == getattr(other, attr)
+ for attr in odict if odict[attr])
+ else:
+ return all(getattr(self, attr) == getattr(other, attr)
+ for attr in odict if sdict[attr] and odict[attr])
+
+ def constrain(self, other):
+ """ Projects all architecture fields that are specified in the given
+ spec onto the instance spec if they're missing from the instance
+ spec. This will only work if the two specs are compatible.
+ """
+ other = self._autospec(other)
+
+ if not self.satisfies(other):
+ raise UnsatisfiableArchitectureSpecError(self, other)
+
+ constrained = False
+ for attr, svalue in self.to_cmp_dict().iteritems():
+ ovalue = getattr(other, attr)
+ if svalue is None and ovalue is not None:
+ setattr(self, attr, ovalue)
+ constrained = True
+
+ return constrained
+
+ def copy(self):
+ clone = ArchSpec.__new__(ArchSpec)
+ clone._dup(self)
+ return clone
+
+ @property
+ def concrete(self):
+ return all(v for k, v in self.to_cmp_dict().iteritems())
+
+ def to_cmp_dict(self):
+ """Returns a dictionary that can be used for field comparison."""
+ return dict([
+ ('platform', self.platform),
+ ('platform_os', self.platform_os),
+ ('target', self.target)])
+
+ def to_dict(self):
+ d = syaml_dict([
+ ('platform', self.platform),
+ ('platform_os', self.platform_os),
+ ('target', self.target)])
+ return syaml_dict([('arch', d)])
+
+ @staticmethod
+ def from_dict(d):
+ """Import an ArchSpec from raw YAML/JSON data.
+
+ This routine implements a measure of compatibility with older
+ versions of Spack. Spack releases before 0.10 used a single
+ string with no OS or platform identifiers. We import old Spack
+ architectures with platform ``spack09``, OS ``unknown``, and the
+ old arch string as the target.
+
+ Specs from `0.10` or later have a more fleshed out architecture
+ descriptor with a platform, an OS, and a target.
+
+ """
+ if not isinstance(d['arch'], dict):
+ return ArchSpec('spack09', 'unknown', d['arch'])
+
+ d = d['arch']
+ return ArchSpec(d['platform'], d['platform_os'], d['target'])
+
+ def __str__(self):
+ return "%s-%s-%s" % (self.platform, self.platform_os, self.target)
+
+ def __repr__(self):
+ return str(self)
+
+ def __contains__(self, string):
+ return string in str(self)
+
+
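A brief sketch of the compatibility path described in from_dict() above (assuming the patched spack.spec is importable; the architecture values are made up):

from spack.spec import ArchSpec

# pre-0.10 databases stored a bare architecture string:
old = ArchSpec.from_dict({'arch': 'chaos_5_x86_64_ib'})
assert str(old) == 'spack09-unknown-chaos_5_x86_64_ib'

# 0.10 and later store a keyed mapping:
new = ArchSpec.from_dict({'arch': {'platform': 'linux',
                                   'platform_os': 'rhel6',
                                   'target': 'x86_64'}})
assert str(new) == 'linux-rhel6-x86_64'
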
+@key_ordering
class CompilerSpec(object):
"""The CompilerSpec field represents the compiler or range of compiler
versions that a package should be built with. CompilerSpecs have a
name and a version list. """
+
def __init__(self, *args):
nargs = len(args)
if nargs == 1:
@@ -198,8 +470,8 @@ class CompilerSpec(object):
else:
raise TypeError(
- "Can only build CompilerSpec from string or CompilerSpec." +
- " Found %s" % type(arg))
+ "Can only build CompilerSpec from string or " +
+ "CompilerSpec. Found %s" % type(arg))
elif nargs == 2:
name, version = args
@@ -211,23 +483,19 @@ class CompilerSpec(object):
raise TypeError(
"__init__ takes 1 or 2 arguments. (%d given)" % nargs)
-
def _add_version(self, version):
self.versions.add(version)
-
def _autospec(self, compiler_spec_like):
if isinstance(compiler_spec_like, CompilerSpec):
return compiler_spec_like
return CompilerSpec(compiler_spec_like)
-
def satisfies(self, other, strict=False):
other = self._autospec(other)
return (self.name == other.name and
self.versions.satisfies(other.versions, strict=strict))
-
def constrain(self, other):
"""Intersect self's versions with other.
@@ -241,44 +509,38 @@ class CompilerSpec(object):
return self.versions.intersect(other.versions)
-
@property
def concrete(self):
"""A CompilerSpec is concrete if its versions are concrete and there
is an available compiler with the right version."""
return self.versions.concrete
-
@property
def version(self):
if not self.concrete:
raise SpecError("Spec is not concrete: " + str(self))
return self.versions[0]
-
def copy(self):
clone = CompilerSpec.__new__(CompilerSpec)
clone.name = self.name
clone.versions = self.versions.copy()
return clone
-
def _cmp_key(self):
return (self.name, self.versions)
-
def to_dict(self):
- d = {'name' : self.name}
+ d = syaml_dict([('name', self.name)])
d.update(self.versions.to_dict())
- return { 'compiler' : d }
+ return syaml_dict([('compiler', d)])
@staticmethod
def from_dict(d):
d = d['compiler']
return CompilerSpec(d['name'], VersionList.from_dict(d))
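
The switch to syaml_dict keeps key order stable in the emitted YAML. A small round-trip sketch, assuming the patched spack.spec is importable:

from spack.spec import CompilerSpec

cs = CompilerSpec('gcc@4.9.3')
d = cs.to_dict()    # ordered, roughly {'compiler': {'name': 'gcc', 'version': '4.9.3'}}
assert CompilerSpec.from_dict(d) == cs
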
-
def __str__(self):
out = self.name
if self.versions and self.versions != _any_version:
@@ -291,44 +553,84 @@ class CompilerSpec(object):
@key_ordering
+class DependencySpec(object):
+ """DependencySpecs connect two nodes in the DAG, and contain deptypes.
+
+ Dependencies can be one (or more) of several types:
+
+ - build: needs to be in the PATH at build time.
+ - link: is linked to and added to compiler flags.
+ - run: needs to be in the PATH for the package to run.
+
+ Fields:
+ - spec: Spec depended on by parent.
+ - parent: Spec that depends on `spec`.
+ - deptypes: list of strings, representing dependency relationships.
+ """
+
+ def __init__(self, parent, spec, deptypes):
+ self.parent = parent
+ self.spec = spec
+ self.deptypes = tuple(sorted(set(deptypes)))
+
+ def update_deptypes(self, deptypes):
+ deptypes = tuple(sorted(set(deptypes)))
+ changed = self.deptypes != deptypes
+ self.deptypes = deptypes
+ return changed
+
+ def copy(self):
+ return DependencySpec(self.parent, self.spec, self.deptypes)
+
+ def _cmp_key(self):
+ return (self.parent.name if self.parent else None,
+ self.spec.name if self.spec else None,
+ self.deptypes)
+
+ def __str__(self):
+ return "%s %s--> %s" % (self.parent.name if self.parent else None,
+ self.deptypes,
+ self.spec.name if self.spec else None)
+
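A minimal sketch of how these edges behave (assuming the patched spack.spec is importable; the package names are placeholders and never touch a repository):

from spack.spec import Spec, DependencySpec

parent, child = Spec('mpileaks'), Spec('callpath')
edge = DependencySpec(parent, child, ('link', 'build'))

assert edge.deptypes == ('build', 'link')        # stored sorted and de-duplicated
assert str(edge) == "mpileaks ('build', 'link')--> callpath"
assert edge.update_deptypes(('run',))            # True: the deptypes changed
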
+
+@key_ordering
class VariantSpec(object):
"""Variants are named, build-time options for a package. Names depend
on the particular package being built, and each named variant can
be enabled or disabled.
"""
- def __init__(self, name, enabled):
- self.name = name
- self.enabled = enabled
+ def __init__(self, name, value):
+ self.name = name
+ self.value = value
def _cmp_key(self):
- return (self.name, self.enabled)
-
+ return (self.name, self.value)
def copy(self):
- return VariantSpec(self.name, self.enabled)
-
+ return VariantSpec(self.name, self.value)
def __str__(self):
- out = '+' if self.enabled else '~'
- return out + self.name
+ if type(self.value) == bool:
+ return '{0}{1}'.format('+' if self.value else '~', self.name)
+ else:
+ return ' {0}={1} '.format(self.name, self.value)
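
With variants now carrying arbitrary values instead of a boolean 'enabled' flag, __str__ keeps the old +/~ syntax for booleans and renders everything else as key=value. A sketch, assuming the patched spack.spec:

from spack.spec import VariantSpec

assert str(VariantSpec('debug', True)) == '+debug'
assert str(VariantSpec('shared', False)) == '~shared'
assert str(VariantSpec('fabrics', 'psm')) == ' fabrics=psm '
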
class VariantMap(HashableMap):
+
def __init__(self, spec):
super(VariantMap, self).__init__()
self.spec = spec
-
def satisfies(self, other, strict=False):
if strict or self.spec._concrete:
- return all(k in self and self[k].enabled == other[k].enabled
+ return all(k in self and self[k].value == other[k].value
for k in other)
else:
- return all(self[k].enabled == other[k].enabled
+ return all(self[k].value == other[k].value
for k in other if k in self)
-
def constrain(self, other):
"""Add all variants in other that aren't in self to self.
@@ -343,11 +645,11 @@ class VariantMap(HashableMap):
changed = False
for k in other:
if k in self:
- if self[k].enabled != other[k].enabled:
+ if self[k].value != other[k].value:
raise UnsatisfiableVariantSpecError(self[k], other[k])
else:
self[k] = other[k].copy()
- changed =True
+ changed = True
return changed
@property
@@ -355,34 +657,100 @@ class VariantMap(HashableMap):
return self.spec._concrete or all(
v in self for v in self.spec.package_class.variants)
-
def copy(self):
clone = VariantMap(None)
for name, variant in self.items():
clone[name] = variant.copy()
return clone
-
def __str__(self):
sorted_keys = sorted(self.keys())
return ''.join(str(self[key]) for key in sorted_keys)
+_valid_compiler_flags = [
+ 'cflags', 'cxxflags', 'fflags', 'ldflags', 'ldlibs', 'cppflags']
+
+
+class FlagMap(HashableMap):
+
+ def __init__(self, spec):
+ super(FlagMap, self).__init__()
+ self.spec = spec
+
+ def satisfies(self, other, strict=False):
+ if strict or (self.spec and self.spec._concrete):
+ return all(f in self and set(self[f]) == set(other[f])
+ for f in other)
+ else:
+ return all(set(self[f]) == set(other[f])
+ for f in other if (other[f] != [] and f in self))
+
+ def constrain(self, other):
+ """Add all flags in other that aren't in self to self.
+
+ Return whether the spec changed.
+ """
+ if other.spec and other.spec._concrete:
+ for k in self:
+ if k not in other:
+ raise UnsatisfiableCompilerFlagSpecError(
+ self[k], '<absent>')
+
+ changed = False
+ for k in other:
+ if k in self and not set(self[k]) <= set(other[k]):
+ raise UnsatisfiableCompilerFlagSpecError(
+ ' '.join(f for f in self[k]),
+ ' '.join(f for f in other[k]))
+ elif k not in self:
+ self[k] = other[k]
+ changed = True
+ return changed
+
+ @staticmethod
+ def valid_compiler_flags():
+ return _valid_compiler_flags
+
+ @property
+ def concrete(self):
+ return all(flag in self for flag in _valid_compiler_flags)
+
+ def copy(self):
+ clone = FlagMap(None)
+ for name, value in self.items():
+ clone[name] = value
+ return clone
+
+ def _cmp_key(self):
+ return tuple((k, tuple(v)) for k, v in sorted(self.iteritems()))
+
+ def __str__(self):
+ sorted_keys = filter(
+ lambda flag: self[flag] != [], sorted(self.keys()))
+ cond_symbol = ' ' if len(sorted_keys) > 0 else ''
+ return cond_symbol + ' '.join(
+ str(key) + '=\"' + ' '.join(
+ str(f) for f in self[key]) + '\"'
+ for key in sorted_keys) + cond_symbol
+
+
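A short sketch of the FlagMap rendering above (assuming the patched spack.spec is importable): empty flag lists are filtered out and non-empty ones are quoted.

from spack.spec import FlagMap

fm = FlagMap(None)
fm['cflags'] = ['-O3', '-g']
fm['ldflags'] = []                    # empty lists are omitted from __str__
assert str(fm) == ' cflags="-O3 -g" '
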
class DependencyMap(HashableMap):
+
"""Each spec has a DependencyMap containing specs for its dependencies.
The DependencyMap is keyed by name. """
@property
def concrete(self):
- return all(d.concrete for d in self.values())
-
+ return all((d.spec.concrete and d.deptypes)
+ for d in self.values())
def __str__(self):
- return ''.join(
- ["^" + str(self[name]) for name in sorted(self.keys())])
+ return "{deps: %s}" % ', '.join(str(d) for d in sorted(self.values()))
@key_ordering
class Spec(object):
+
def __init__(self, spec_like, *dep_like, **kwargs):
# Copy if spec_like is a Spec.
if isinstance(spec_like, Spec):
@@ -405,32 +773,86 @@ class Spec(object):
# writes directly into this Spec object.
other = spec_list[0]
self.name = other.name
- self.dependents = other.dependents
self.versions = other.versions
self.architecture = other.architecture
self.compiler = other.compiler
- self.dependencies = other.dependencies
+ self.compiler_flags = other.compiler_flags
+ self.compiler_flags.spec = self
+ self._dependencies = other._dependencies
+ self._dependents = other._dependents
self.variants = other.variants
self.variants.spec = self
self.namespace = other.namespace
+ self._hash = other._hash
+ self._cmp_key_cache = other._cmp_key_cache
# Specs are by default not assumed to be normal, but in some
# cases we've read them from a file want to assume normal.
# This allows us to manipulate specs that Spack doesn't have
# package.py files for.
- self._normal = kwargs.get('normal', False)
+ self._normal = kwargs.get('normal', False)
self._concrete = kwargs.get('concrete', False)
# Allow a spec to be constructed with an external path.
self.external = kwargs.get('external', None)
+ self.external_module = kwargs.get('external_module', None)
# This allows users to construct a spec DAG with literals.
# Note that given two specs a and b, Spec(a) copies a, but
# Spec(a, b) will copy a but just add b as a dep.
+ deptypes = ()
for dep in dep_like:
- spec = dep if isinstance(dep, Spec) else Spec(dep)
- self._add_dependency(spec)
+ if isinstance(dep, Spec):
+ spec = dep
+ elif isinstance(dep, (list, tuple)):
+ # Literals can be deptypes -- if there are tuples in the
+ # list, they will be used as deptypes for the following Spec.
+ deptypes = tuple(dep)
+ continue
+ else:
+ spec = Spec(dep)
+ self._add_dependency(spec, deptypes)
+ deptypes = ()
+
+ def __getattr__(self, item):
+ """Delegate to self.package if the attribute is not in the spec"""
+ # This line is to avoid infinite recursion in case package is
+ # not present among self attributes
+ if item.endswith('libs'):
+ return getattr(self.package, item)
+ raise AttributeError(item)
+
+ def get_dependency(self, name):
+ dep = self._dependencies.get(name)
+ if dep is not None:
+ return dep
+ raise InvalidDependencyError(
+ self.name + " does not depend on " + comma_or(name))
+
+ def _find_deps(self, where, deptype):
+ deptype = canonical_deptype(deptype)
+
+ return [dep for dep in where.values()
+ if deptype and (not dep.deptypes or
+ any(d in deptype for d in dep.deptypes))]
+
+ def dependencies(self, deptype=None):
+ return [d.spec
+ for d in self._find_deps(self._dependencies, deptype)]
+
+ def dependents(self, deptype=None):
+ return [d.parent
+ for d in self._find_deps(self._dependents, deptype)]
+
+ def dependencies_dict(self, deptype=None):
+ return dict((d.spec.name, d)
+ for d in self._find_deps(self._dependencies, deptype))
+
+ def dependents_dict(self, deptype=None):
+ return dict((d.parent.name, d)
+ for d in self._find_deps(self._dependents, deptype))
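
The literal-DAG construction described in __init__, together with the new accessors, can be sketched as follows (package names are placeholders; assumes the patched spack.spec is importable):

from spack.spec import Spec

# a tuple literal sets the deptypes of the spec that immediately follows it:
s = Spec('mpileaks', ('build', 'link'), 'libelf', 'mpi')

assert s.dependencies_dict()['libelf'].deptypes == ('build', 'link')
assert s.dependencies_dict()['mpi'].deptypes == ()
assert sorted(d.name for d in s.dependencies()) == ['libelf', 'mpi']
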
#
# Private routines here are called by the parser when building a spec.
@@ -439,42 +861,85 @@ class Spec(object):
"""Called by the parser to add an allowable version."""
self.versions.add(version)
-
- def _add_variant(self, name, enabled):
+ def _add_variant(self, name, value):
"""Called by the parser to add a variant."""
- if name in self.variants: raise DuplicateVariantError(
+ if name in self.variants:
+ raise DuplicateVariantError(
"Cannot specify variant '%s' twice" % name)
- self.variants[name] = VariantSpec(name, enabled)
+ if isinstance(value, basestring) and value.upper() == 'TRUE':
+ value = True
+ elif isinstance(value, basestring) and value.upper() == 'FALSE':
+ value = False
+ self.variants[name] = VariantSpec(name, value)
+
+ def _add_flag(self, name, value):
+ """Called by the parser to add a known flag.
+        Known flags currently include "arch", "platform", "os", "target",
+        and the valid compiler flags (e.g. "cflags"); anything else is
+        treated as a variant.
+        """
+ valid_flags = FlagMap.valid_compiler_flags()
+ if name == 'arch' or name == 'architecture':
+ parts = tuple(value.split('-'))
+ plat, os, tgt = parts if len(parts) == 3 else (None, None, value)
+ self._set_architecture(platform=plat, platform_os=os, target=tgt)
+ elif name == 'platform':
+ self._set_architecture(platform=value)
+ elif name == 'os' or name == 'operating_system':
+ self._set_architecture(platform_os=value)
+ elif name == 'target':
+ self._set_architecture(target=value)
+ elif name in valid_flags:
+ assert(self.compiler_flags is not None)
+ self.compiler_flags[name] = value.split()
+ else:
+ self._add_variant(name, value)
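
Roughly, the routing performed by _add_flag() can be seen by calling the parser-facing helpers directly (a sketch only, assuming the patched spack.spec; real code reaches these through the spec parser):

from spack.spec import Spec

s = Spec('hdf5')
s._add_flag('os', 'rhel6')          # folded into the architecture
s._add_flag('cflags', '-O3 -g')     # recognized compiler flag, split into a list
s._add_flag('shared', 'True')       # anything else becomes a variant

assert s.architecture.platform_os == 'rhel6'
assert s.compiler_flags['cflags'] == ['-O3', '-g']
assert s.variants['shared'].value is True
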
+ def _set_architecture(self, **kwargs):
+ """Called by the parser to set the architecture."""
+ arch_attrs = ['platform', 'platform_os', 'target']
+ if self.architecture and self.architecture.concrete:
+ raise DuplicateArchitectureError(
+ "Spec for '%s' cannot have two architectures." % self.name)
+
+ if not self.architecture:
+ new_vals = tuple(kwargs.get(arg, None) for arg in arch_attrs)
+ self.architecture = ArchSpec(*new_vals)
+ else:
+ new_attrvals = [(a, v) for a, v in kwargs.iteritems()
+ if a in arch_attrs]
+ for new_attr, new_value in new_attrvals:
+ if getattr(self.architecture, new_attr):
+ raise DuplicateArchitectureError(
+ "Spec for '%s' cannot have two '%s' specified "
+ "for its architecture" % (self.name, new_attr))
+ else:
+ setattr(self.architecture, new_attr, new_value)
def _set_compiler(self, compiler):
"""Called by the parser to set the compiler."""
- if self.compiler: raise DuplicateCompilerSpecError(
+ if self.compiler:
+ raise DuplicateCompilerSpecError(
"Spec for '%s' cannot have two compilers." % self.name)
self.compiler = compiler
-
- def _set_architecture(self, architecture):
- """Called by the parser to set the architecture."""
- if self.architecture: raise DuplicateArchitectureError(
- "Spec for '%s' cannot have two architectures." % self.name)
- self.architecture = architecture
-
-
- def _add_dependency(self, spec):
+ def _add_dependency(self, spec, deptypes):
"""Called by the parser to add another spec as a dependency."""
- if spec.name in self.dependencies:
- raise DuplicateDependencyError("Cannot depend on '%s' twice" % spec)
- self.dependencies[spec.name] = spec
- spec.dependents[self.name] = self
+ if spec.name in self._dependencies:
+ raise DuplicateDependencyError(
+ "Cannot depend on '%s' twice" % spec)
+
+ # create an edge and add to parent and child
+ dspec = DependencySpec(self, spec, deptypes)
+ self._dependencies[spec.name] = dspec
+ spec._dependents[self.name] = dspec
#
# Public interface
#
@property
def fullname(self):
- return '%s.%s' % (self.namespace, self.name) if self.namespace else self.name
-
+ return (
+ ('%s.%s' % (self.namespace, self.name)) if self.namespace else
+ (self.name if self.name else ''))
@property
def root(self):
@@ -483,30 +948,27 @@ class Spec(object):
installed). This will throw an assertion error if that is not
the case.
"""
- if not self.dependents:
+ if not self._dependents:
return self
# If the spec has multiple dependents, ensure that they all
# lead to the same place. Spack shouldn't deal with any DAGs
# with multiple roots, so something's wrong if we find one.
- depiter = iter(self.dependents.values())
- first_root = next(depiter).root
- assert(all(first_root is d.root for d in depiter))
+ depiter = iter(self._dependents.values())
+ first_root = next(depiter).parent.root
+ assert(all(first_root is d.parent.root for d in depiter))
return first_root
-
@property
def package(self):
return spack.repo.get(self)
-
@property
def package_class(self):
"""Internal package call gets only the class object for a package.
Use this to just get package metadata.
"""
- return spack.repo.get_pkg_class(self.name)
-
+ return spack.repo.get_pkg_class(self.fullname)
@property
def virtual(self):
@@ -519,12 +981,10 @@ class Spec(object):
"""
return Spec.is_virtual(self.name)
-
@staticmethod
def is_virtual(name):
"""Test if a name is virtual without requiring a Spec."""
- return not spack.repo.exists(name)
-
+ return (name is not None) and (not spack.repo.exists(name))
@property
def concrete(self):
@@ -535,18 +995,34 @@ class Spec(object):
if self._concrete:
return True
- self._concrete = bool(not self.virtual
- and self.namespace is not None
- and self.versions.concrete
- and self.variants.concrete
- and self.architecture
- and self.compiler and self.compiler.concrete
- and self.dependencies.concrete)
-
+ self._concrete = bool(not self.virtual and
+ self.namespace is not None and
+ self.versions.concrete and
+ self.variants.concrete and
+ self.architecture and
+ self.architecture.concrete and
+ self.compiler and self.compiler.concrete and
+ self.compiler_flags.concrete and
+ self._dependencies.concrete)
return self._concrete
-    def traverse(self, visited=None, d=0, **kwargs):
+    def traverse(self, **kwargs):
+        direction = kwargs.get('direction', 'children')
+        depth = kwargs.get('depth', False)
+
+        get_spec = lambda s: s.spec
+        if direction == 'parents':
+            get_spec = lambda s: s.parent
+
+        if depth:
+ for d, dspec in self.traverse_edges(**kwargs):
+ yield d, get_spec(dspec)
+ else:
+ for dspec in self.traverse_edges(**kwargs):
+ yield get_spec(dspec)
+
+ def traverse_edges(self, visited=None, d=0, deptype=None,
+ deptype_query=None, dep_spec=None, **kwargs):
"""Generic traversal of the DAG represented by this spec.
This will yield each node in the spec. Options:
@@ -560,7 +1036,7 @@ class Spec(object):
children in the dependency DAG.
cover [=nodes|edges|paths]
- Determines how extensively to cover the dag. Possible vlaues:
+ Determines how extensively to cover the dag. Possible values:
'nodes': Visit each node in the dag only once. Every node
yielded by this function will be unique.
@@ -581,7 +1057,7 @@ class Spec(object):
in the traversal.
root [=True]
- If false, this won't yield the root node, just its descendents.
+           If False, this won't yield the root node, just its descendants.
direction [=children|parents]
If 'children', does a traversal of this spec's children. If
@@ -589,14 +1065,18 @@ class Spec(object):
"""
# get initial values for kwargs
- depth = kwargs.get('depth', False)
- key_fun = kwargs.get('key', id)
+ depth = kwargs.get('depth', False)
+ key_fun = kwargs.get('key', id)
if isinstance(key_fun, basestring):
key_fun = attrgetter(key_fun)
yield_root = kwargs.get('root', True)
- cover = kwargs.get('cover', 'nodes')
- direction = kwargs.get('direction', 'children')
- order = kwargs.get('order', 'pre')
+ cover = kwargs.get('cover', 'nodes')
+ direction = kwargs.get('direction', 'children')
+ order = kwargs.get('order', 'pre')
+
+ deptype = canonical_deptype(deptype)
+ if deptype_query is None:
+ deptype_query = ('link', 'run')
# Make sure kwargs have legal values; raise ValueError if not.
def validate(name, val, allowed_values):
@@ -615,31 +1095,49 @@ class Spec(object):
if key in visited and cover == 'nodes':
return
- # Determine whether and what to yield for this node.
+ def return_val(dspec):
+ if not dspec:
+ # make a fake dspec for the root.
+ if direction == 'parents':
+ dspec = DependencySpec(self, None, ())
+ else:
+ dspec = DependencySpec(None, self, ())
+ return (d, dspec) if depth else dspec
+
yield_me = yield_root or d > 0
- result = (d, self) if depth else self
# Preorder traversal yields before successors
if yield_me and order == 'pre':
- yield result
+ yield return_val(dep_spec)
# Edge traversal yields but skips children of visited nodes
if not (key in visited and cover == 'edges'):
# This code determines direction and yields the children/parents
- successors = self.dependencies
- if direction == 'parents':
- successors = self.dependents
+ if direction == 'children':
+ successors = self.dependencies_dict(deptype)
+ succ = lambda s: s.spec
+ elif direction == 'parents':
+ successors = self.dependents_dict(deptype)
+ succ = lambda s: s.parent
+ else:
+ raise ValueError('Invalid traversal direction: %s' % direction)
visited.add(key)
- for name in sorted(successors):
+ for name, dspec in sorted(successors.items()):
child = successors[name]
- for elt in child.traverse(visited, d+1, **kwargs):
+ children = succ(child).traverse_edges(
+ visited,
+ d=(d + 1),
+ deptype=deptype,
+ deptype_query=deptype_query,
+ dep_spec=dspec,
+ **kwargs)
+ for elt in children:
yield elt
# Postorder traversal yields after successors
if yield_me and order == 'post':
- yield result
-
+ yield return_val(dep_spec)
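
In practice the split keeps traverse() yielding Spec nodes while traverse_edges() exposes the DependencySpec edges; a rough sketch with placeholder package names, assuming the patched spack.spec:

from spack.spec import Spec, DependencySpec

s = Spec('mpileaks ^callpath ^mpich')

# nodes, post-order, restricted to link/run edges:
names = [node.name for node in s.traverse(order='post', deptype=('link', 'run'))]

# (depth, node) pairs, handy for indented tree printing:
for depth, node in s.traverse(depth=True):
    print('  ' * depth + node.name)

# the edge view yields DependencySpec objects (plus a synthetic edge for the root):
assert all(isinstance(e, DependencySpec) for e in s.traverse_edges())
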
@property
def short_spec(self):
@@ -647,60 +1145,83 @@ class Spec(object):
instead of completely enumerated."""
return self.format('$_$@$%@$+$=$#')
-
@property
def cshort_spec(self):
"""Returns a version of the spec with the dependencies hashed
instead of completely enumerated."""
return self.format('$_$@$%@$+$=$#', color=True)
-
@property
def prefix(self):
- return Prefix(spack.install_layout.path_for_spec(self))
-
+ return Prefix(spack.store.layout.path_for_spec(self))
def dag_hash(self, length=None):
- """
- Return a hash of the entire spec DAG, including connectivity.
- """
- yaml_text = yaml.dump(
- self.to_node_dict(), default_flow_style=True, width=sys.maxint)
- sha = hashlib.sha1(yaml_text)
- return base64.b32encode(sha.digest()).lower()[:length]
-
+ """Return a hash of the entire spec DAG, including connectivity."""
+ if self._hash:
+ return self._hash[:length]
+ else:
+ yaml_text = syaml.dump(
+ self.to_node_dict(), default_flow_style=True, width=maxint)
+ sha = hashlib.sha1(yaml_text)
+ b32_hash = base64.b32encode(sha.digest()).lower()
+ if self.concrete:
+ self._hash = b32_hash
+ return b32_hash[:length]
+
+ def dag_hash_bit_prefix(self, bits):
+ """Get the first <bits> bits of the DAG hash as an integer type."""
+ return base32_prefix_bits(self.dag_hash(), bits)
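
The recipe above is: canonical YAML of to_node_dict(), then SHA-1, base32, lowercase, truncate. A standalone sketch of the same recipe; sketch_dag_hash is a hypothetical helper, not part of the patch:

import base64
import hashlib


def sketch_dag_hash(yaml_text, length=None):
    # mirror of dag_hash(): sha1 -> base32 -> lowercase -> optional prefix
    sha = hashlib.sha1(yaml_text)
    return base64.b32encode(sha.digest()).lower()[:length]


print(sketch_dag_hash("{zlib: {version: '1.2.8'}}\n", 7))   # a short 7-character prefix
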
def to_node_dict(self):
- d = {
- 'variants' : dict(
- (name,v.enabled) for name, v in self.variants.items()),
- 'arch' : self.architecture,
- 'dependencies' : dict((d, self.dependencies[d].dag_hash())
- for d in sorted(self.dependencies))
- }
-
- # Older concrete specs do not have a namespace. Omit for
- # consistent hashing.
- if not self.concrete or self.namespace:
- d['namespace'] = self.namespace
+ d = syaml_dict()
+
+ if self.versions:
+ d.update(self.versions.to_dict())
+
+ if self.architecture:
+ d.update(self.architecture.to_dict())
if self.compiler:
d.update(self.compiler.to_dict())
- else:
- d['compiler'] = None
- d.update(self.versions.to_dict())
- return { self.name : d }
+ if self.namespace:
+ d['namespace'] = self.namespace
- def to_yaml(self, stream=None):
+ params = syaml_dict(sorted(
+ (name, v.value) for name, v in self.variants.items()))
+ params.update(sorted(self.compiler_flags.items()))
+ if params:
+ d['parameters'] = params
+
+ # TODO: restore build dependencies here once we have less picky
+ # TODO: concretization.
+ deps = self.dependencies_dict(deptype=('link', 'run'))
+ if deps:
+ d['dependencies'] = syaml_dict([
+ (name,
+ syaml_dict([
+ ('hash', dspec.spec.dag_hash()),
+ ('type', sorted(str(s) for s in dspec.deptypes))])
+ ) for name, dspec in sorted(deps.items())
+ ])
+
+ return syaml_dict([(self.name, d)])
+
+ def to_dict(self):
node_list = []
- for s in self.traverse(order='pre'):
+ for s in self.traverse(order='pre', deptype=('link', 'run')):
node = s.to_node_dict()
node[s.name]['hash'] = s.dag_hash()
node_list.append(node)
- return yaml.dump({ 'spec' : node_list },
- stream=stream, default_flow_style=False)
+ return syaml_dict([('spec', node_list)])
+
+ def to_yaml(self, stream=None):
+ return syaml.dump(
+ self.to_dict(), stream=stream, default_flow_style=False)
+
+ def to_json(self, stream=None):
+ return sjson.dump(self.to_dict(), stream)
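
The node layout produced by to_node_dict()/to_dict() looks roughly like the comment below (a sketch only; exact quoting and ordering come from spack_yaml, and the spec string is invented):

from spack.spec import Spec

s = Spec('zlib@1.2.8 %gcc@4.9.3 +pic arch=linux-rhel6-x86_64')
print(s.to_yaml())
# roughly:
#   spec:
#   - zlib:
#       version: 1.2.8
#       arch:
#         platform: linux
#         platform_os: rhel6
#         target: x86_64
#       compiler:
#         name: gcc
#         version: 4.9.3
#       parameters:
#         pic: true
#       hash: <32-character base32 DAG hash>

print(s.to_json())    # same nested structure, serialized as JSON
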
@staticmethod
def from_node_dict(node):
@@ -709,53 +1230,116 @@ class Spec(object):
spec = Spec(name)
spec.namespace = node.get('namespace', None)
- spec.versions = VersionList.from_dict(node)
- spec.architecture = node['arch']
+ spec._hash = node.get('hash', None)
- if node['compiler'] is None:
- spec.compiler = None
- else:
+ if 'version' in node or 'versions' in node:
+ spec.versions = VersionList.from_dict(node)
+
+ if 'arch' in node:
+ spec.architecture = ArchSpec.from_dict(node)
+
+ if 'compiler' in node:
spec.compiler = CompilerSpec.from_dict(node)
+ else:
+ spec.compiler = None
+
+ if 'parameters' in node:
+ for name, value in node['parameters'].items():
+ if name in _valid_compiler_flags:
+ spec.compiler_flags[name] = value
+ else:
+ spec.variants[name] = VariantSpec(name, value)
+
+ elif 'variants' in node:
+ for name, value in node['variants'].items():
+ spec.variants[name] = VariantSpec(name, value)
+ for name in FlagMap.valid_compiler_flags():
+ spec.compiler_flags[name] = []
- if 'variants' in node:
- for name, enabled in node['variants'].items():
- spec.variants[name] = VariantSpec(name, enabled)
+ # Don't read dependencies here; from_node_dict() is used by
+ # from_yaml() to read the root *and* each dependency spec.
return spec
+ @staticmethod
+ def read_yaml_dep_specs(dependency_dict):
+ """Read the DependencySpec portion of a YAML-formatted Spec.
+
+ This needs to be backward-compatible with older spack spec
+ formats so that reindex will work on old specs/databases.
+ """
+ for dep_name, elt in dependency_dict.items():
+ if isinstance(elt, basestring):
+ # original format, elt is just the dependency hash.
+ dag_hash, deptypes = elt, ['build', 'link']
+ elif isinstance(elt, tuple):
+ # original deptypes format: (used tuples, not future-proof)
+ dag_hash, deptypes = elt
+ elif isinstance(elt, dict):
+ # new format: elements of dependency spec are keyed.
+ dag_hash, deptypes = elt['hash'], elt['type']
+ else:
+ raise SpecError("Couldn't parse dependency types in spec.")
+
+ yield dep_name, dag_hash, list(deptypes)
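
The three on-disk layouts the reader accepts, side by side (a sketch; 'abc123' stands in for a real 32-character hash):

from spack.spec import Spec

old_style   = {'callpath': 'abc123'}                        # hash only, pre-deptypes
tuple_style = {'callpath': ('abc123', ('build', 'link'))}   # interim tuple format
dict_style  = {'callpath': {'hash': 'abc123',
                            'type': ['build', 'link']}}     # current keyed format

for layout in (old_style, tuple_style, dict_style):
    name, dag_hash, deptypes = next(Spec.read_yaml_dep_specs(layout))
    assert (name, dag_hash) == ('callpath', 'abc123')
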
@staticmethod
- def from_yaml(stream):
+ def from_dict(data):
"""Construct a spec from YAML.
Parameters:
- stream -- string or file object to read from.
-
- TODO: currently discards hashes. Include hashes when they
- represent more than the DAG does.
-
+ data -- a nested dict/list data structure read from YAML or JSON.
"""
- deps = {}
- spec = None
+ nodes = data['spec']
+
+ # Read nodes out of list. Root spec is the first element;
+ # dependencies are the following elements.
+ dep_list = [Spec.from_node_dict(node) for node in nodes]
+ if not dep_list:
+ raise SpecError("YAML spec contains no nodes.")
+ deps = dict((spec.name, spec) for spec in dep_list)
+ spec = dep_list[0]
+
+ for node in nodes:
+ # get dependency dict from the node.
+ name = next(iter(node))
- try:
- yfile = yaml.load(stream)
- except MarkedYAMLError, e:
- raise SpackYAMLError("error parsing YAML spec:", str(e))
+ if 'dependencies' not in node[name]:
+ continue
- for node in yfile['spec']:
- name = next(iter(node))
- dep = Spec.from_node_dict(node)
- if not spec:
- spec = dep
- deps[dep.name] = dep
+ yaml_deps = node[name]['dependencies']
+ for dname, dhash, dtypes in Spec.read_yaml_dep_specs(yaml_deps):
+ # Fill in dependencies by looking them up by name in deps dict
+ deps[name]._dependencies[dname] = DependencySpec(
+ deps[name], deps[dname], dtypes)
- for node in yfile['spec']:
- name = next(iter(node))
- for dep_name in node[name]['dependencies']:
- deps[name].dependencies[dep_name] = deps[dep_name]
return spec
+ @staticmethod
+ def from_yaml(stream):
+ """Construct a spec from YAML.
+
+ Parameters:
+ stream -- string or file object to read from.
+ """
+ try:
+ data = syaml.load(stream)
+ return Spec.from_dict(data)
+ except MarkedYAMLError as e:
+ raise syaml.SpackYAMLError("error parsing YAML spec:", str(e))
+
+ @staticmethod
+ def from_json(stream):
+ """Construct a spec from JSON.
+
+ Parameters:
+ stream -- string or file object to read from.
+ """
+ try:
+ data = sjson.load(stream)
+ return Spec.from_dict(data)
+ except Exception as e:
+ raise sjson.SpackJSONError("error parsing JSON spec:", str(e))
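
A round-trip sketch tying the readers back to the writers above (placeholder package names; assumes the patched spack.spec is importable):

from spack.spec import Spec

s = Spec('libdwarf ^libelf@0.8.13')

round_trip = Spec.from_yaml(s.to_yaml())
assert round_trip.name == 'libdwarf'
assert [d.name for d in round_trip.dependencies()] == ['libelf']

# JSON carries the same nested structure, so the hashes agree:
assert Spec.from_json(s.to_json()).dag_hash() == s.dag_hash()
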
def _concretize_helper(self, presets=None, visited=None):
"""Recursive helper function for concretize().
@@ -763,8 +1347,10 @@ class Spec(object):
concretized, they're added to the presets, and ancestors
will prefer the settings of their children.
"""
- if presets is None: presets = {}
- if visited is None: visited = set()
+ if presets is None:
+ presets = {}
+ if visited is None:
+ visited = set()
if self.name in visited:
return False
@@ -772,56 +1358,42 @@ class Spec(object):
changed = False
# Concretize deps first -- this is a bottom-up process.
- for name in sorted(self.dependencies.keys()):
- changed |= self.dependencies[name]._concretize_helper(presets, visited)
+ for name in sorted(self._dependencies.keys()):
+ changed |= self._dependencies[
+ name].spec._concretize_helper(presets, visited)
if self.name in presets:
changed |= self.constrain(presets[self.name])
-
else:
# Concretize virtual dependencies last. Because they're added
# to presets below, their constraints will all be merged, but we'll
# still need to select a concrete package later.
- changed |= any(
- (spack.concretizer.concretize_architecture(self),
- spack.concretizer.concretize_compiler(self),
- spack.concretizer.concretize_version(self),
- spack.concretizer.concretize_variants(self)))
+ if not self.virtual:
+ changed |= any(
+ (spack.concretizer.concretize_architecture(self),
+ spack.concretizer.concretize_compiler(self),
+ spack.concretizer.concretize_compiler_flags(
+ self), # has to be concretized after compiler
+ spack.concretizer.concretize_version(self),
+ spack.concretizer.concretize_variants(self)))
presets[self.name] = self
visited.add(self.name)
return changed
-
def _replace_with(self, concrete):
"""Replace this virtual spec with a concrete spec."""
assert(self.virtual)
- for name, dependent in self.dependents.items():
+ for name, dep_spec in self._dependents.items():
+ dependent = dep_spec.parent
+ deptypes = dep_spec.deptypes
+
# remove self from all dependents.
- del dependent.dependencies[self.name]
+ del dependent._dependencies[self.name]
# add the replacement, unless it is already a dep of dependent.
- if concrete.name not in dependent.dependencies:
- dependent._add_dependency(concrete)
-
-
- def _replace_node(self, replacement):
- """Replace this spec with another.
-
- Connects all dependents of this spec to its replacement, and
- disconnects this spec from any dependencies it has. New spec
- will have any dependencies the replacement had, and may need
- to be normalized.
-
- """
- for name, dependent in self.dependents.items():
- del dependent.dependencies[self.name]
- dependent._add_dependency(replacement)
-
- for name, dep in self.dependencies.items():
- del dep.dependents[self.name]
- del self.dependencies[dep.name]
-
+ if concrete.name not in dependent._dependencies:
+ dependent._add_dependency(concrete, deptypes)
def _expand_virtual_packages(self):
"""Find virtual packages in this spec, replace them with providers,
@@ -841,12 +1413,14 @@ class Spec(object):
a problem.
"""
# Make an index of stuff this spec already provides
+ # XXX(deptype): 'link' and 'run'?
self_index = ProviderIndex(self.traverse(), restrict=True)
-
changed = False
done = False
+
while not done:
done = True
+ # XXX(deptype): 'link' and 'run'?
for spec in list(self.traverse()):
replacement = None
if spec.virtual:
@@ -855,12 +1429,14 @@ class Spec(object):
# TODO: may break if in-place on self but
# shouldn't happen if root is traversed first.
spec._replace_with(replacement)
- done=False
+ done = False
break
if not replacement:
- # Get a list of possible replacements in order of preference.
- candidates = spack.concretizer.choose_virtual_or_external(spec)
+ # Get a list of possible replacements in order of
+ # preference.
+ candidates = spack.concretizer.choose_virtual_or_external(
+ spec)
# Try the replacements in order, skipping any that cause
# satisfiability problems.
@@ -873,34 +1449,38 @@ class Spec(object):
copy[spec.name]._dup(replacement.copy(deps=False))
try:
- # If there are duplicate providers or duplicate provider
- # deps, consolidate them and merge constraints.
+ # If there are duplicate providers or duplicate
+ # provider deps, consolidate them and merge
+ # constraints.
copy.normalize(force=True)
break
- except SpecError as e:
+ except SpecError:
# On error, we'll try the next replacement.
continue
# If replacement is external then trim the dependencies
- if replacement.external:
- if (spec.dependencies):
+ if replacement.external or replacement.external_module:
+ if (spec._dependencies):
changed = True
- spec.dependencies = DependencyMap()
- replacement.dependencies = DependencyMap()
+ spec._dependencies = DependencyMap()
+ replacement._dependencies = DependencyMap()
+ replacement.architecture = self.architecture
# TODO: could this and the stuff in _dup be cleaned up?
def feq(cfield, sfield):
return (not cfield) or (cfield == sfield)
- if replacement is spec or (feq(replacement.name, spec.name) and
- feq(replacement.versions, spec.versions) and
- feq(replacement.compiler, spec.compiler) and
- feq(replacement.architecture, spec.architecture) and
- feq(replacement.dependencies, spec.dependencies) and
- feq(replacement.variants, spec.variants) and
- feq(replacement.external, spec.external)):
+ if replacement is spec or (
+ feq(replacement.name, spec.name) and
+ feq(replacement.versions, spec.versions) and
+ feq(replacement.compiler, spec.compiler) and
+ feq(replacement.architecture, spec.architecture) and
+ feq(replacement._dependencies, spec._dependencies) and
+ feq(replacement.variants, spec.variants) and
+ feq(replacement.external, spec.external) and
+ feq(replacement.external_module,
+ spec.external_module)):
continue
-
# Refine this spec to the candidate. This uses
# replace_with AND dup so that it can work in
# place. TODO: make this more efficient.
@@ -911,12 +1491,11 @@ class Spec(object):
changed = True
self_index.update(spec)
- done=False
+ done = False
break
return changed
-
def concretize(self):
"""A spec is concrete if it describes one build of a package uniquely.
This will ensure that this spec is concrete.
@@ -925,10 +1504,12 @@ class Spec(object):
of a package, this will add constraints to make it concrete.
Some rigorous validation and checks are also performed on the spec.
- Concretizing ensures that it is self-consistent and that it's consistent
- with requirements of its pacakges. See flatten() and normalize() for
- more details on this.
+ Concretizing ensures that it is self-consistent and that it's
+        consistent with requirements of its packages. See flatten() and
+ normalize() for more details on this.
"""
+ if not self.name:
+ raise SpecError("Attempting to concretize anonymous spec")
if self._concrete:
return
@@ -941,9 +1522,9 @@ class Spec(object):
self._expand_virtual_packages(),
self._concretize_helper())
changed = any(changes)
- force=True
+ force = True
- for s in self.traverse():
+ for s in self.traverse(deptype_query=alldeps):
# After concretizing, assign namespaces to anything left.
# Note that this doesn't count as a "change". The repository
# configuration is constant throughout a spack run, and
@@ -955,20 +1536,27 @@ class Spec(object):
if s.namespace is None:
s.namespace = spack.repo.repo_for_pkg(s.name).namespace
+ for s in self.traverse(root=False):
+ if s.external_module:
+ compiler = spack.compilers.compiler_for_spec(
+ s.compiler, s.architecture)
+ for mod in compiler.modules:
+ load_module(mod)
+
+ s.external = get_path_from_module(s.external_module)
+
# Mark everything in the spec as concrete, as well.
self._mark_concrete()
-
- def _mark_concrete(self):
+ def _mark_concrete(self, value=True):
"""Mark this spec and its dependencies as concrete.
Only for internal use -- client code should use "concretize"
unless there is a need to force a spec to be concrete.
"""
- for s in self.traverse():
- s._normal = True
- s._concrete = True
-
+ for s in self.traverse(deptype_query=alldeps):
+ s._normal = value
+ s._concrete = value
def concretized(self):
"""This is a non-destructive version of concretize(). First clones,
@@ -978,7 +1566,6 @@ class Spec(object):
clone.concretize()
return clone
-
def flat_dependencies(self, **kwargs):
"""Return a DependencyMap containing all of this spec's
dependencies with their constraints merged.
@@ -990,51 +1577,43 @@ class Spec(object):
returns them.
"""
copy = kwargs.get('copy', True)
+ deptype_query = kwargs.get('deptype_query')
- flat_deps = DependencyMap()
+ flat_deps = {}
try:
- for spec in self.traverse(root=False):
+ deptree = self.traverse(root=False, deptype_query=deptype_query)
+ for spec in deptree:
+
if spec.name not in flat_deps:
if copy:
- flat_deps[spec.name] = spec.copy(deps=False)
- else:
- flat_deps[spec.name] = spec
+ spec = spec.copy(deps=False)
+ flat_deps[spec.name] = spec
else:
flat_deps[spec.name].constrain(spec)
if not copy:
- for dep in flat_deps.values():
- dep.dependencies.clear()
- dep.dependents.clear()
- self.dependencies.clear()
+ for spec in flat_deps.values():
+ spec._dependencies.clear()
+ spec._dependents.clear()
+ self._dependencies.clear()
return flat_deps
- except UnsatisfiableSpecError, e:
+ except UnsatisfiableSpecError as e:
# Here, the DAG contains two instances of the same package
# with inconsistent constraints. Users cannot produce
# inconsistent specs like this on the command line: the
# parser doesn't allow it. Spack must be broken!
raise InconsistentSpecError("Invalid Spec DAG: %s" % e.message)
-
- def index(self):
+ def index(self, deptype=None):
"""Return DependencyMap that points to all the dependencies in this
spec."""
dm = DependencyMap()
- for spec in self.traverse():
+ for spec in self.traverse(deptype=deptype):
dm[spec.name] = spec
return dm
-
- def flatten(self):
- """Pull all dependencies up to the root (this spec).
- Merge constraints for dependencies with the same name, and if they
- conflict, throw an exception. """
- for dep in self.flat_dependencies(copy=False):
- self._add_dependency(dep)
-
-
def _evaluate_dependency_conditions(self, name):
"""Evaluate all the conditions on a dependency with this name.
@@ -1054,13 +1633,12 @@ class Spec(object):
dep = Spec(name)
try:
dep.constrain(dep_spec)
- except UnsatisfiableSpecError, e:
- e.message = ("Conflicting conditional dependencies on package "
- "%s for spec %s" % (self.name, self))
+ except UnsatisfiableSpecError as e:
+ e.message = ("Conflicting conditional dependencies on"
+ "package %s for spec %s" % (self.name, self))
raise e
return dep
-
def _find_provider(self, vdep, provider_index):
"""Find provider for a virtual spec in the provider index.
Raise an exception if there is a conflicting virtual
@@ -1072,7 +1650,8 @@ class Spec(object):
# If there is a provider for the vpkg, then use that instead of
# the virtual package.
if providers:
- # Remove duplicate providers that can concretize to the same result.
+ # Remove duplicate providers that can concretize to the same
+ # result.
for provider in providers:
for spec in providers:
if spec is not provider and provider.satisfies(spec):
@@ -1091,11 +1670,11 @@ class Spec(object):
elif required:
raise UnsatisfiableProviderSpecError(required[0], vdep)
-
- def _merge_dependency(self, dep, visited, spec_deps, provider_index):
+ def _merge_dependency(self, dep, deptypes, visited, spec_deps,
+ provider_index):
"""Merge the dependency into this spec.
- This is the core of the normalize() method. There are a few basic steps:
+ This is the core of normalize(). There are some basic steps:
* If dep is virtual, evaluate whether it corresponds to an
existing concrete dependency, and merge if so.
@@ -1135,27 +1714,34 @@ class Spec(object):
if dep.name not in spec_deps:
spec_deps[dep.name] = dep.copy()
changed = True
+ else:
+ dspec = spec_deps[dep.name]
+ if self.name not in dspec._dependents:
+ self._add_dependency(dspec, deptypes)
+ else:
+ dependent = dspec._dependents[self.name]
+ changed = dependent.update_deptypes(deptypes)
# Constrain package information with spec info
try:
changed |= spec_deps[dep.name].constrain(dep)
- except UnsatisfiableSpecError, e:
- e.message = "Invalid spec: '%s'. "
+ except UnsatisfiableSpecError as e:
+ e.message = "Invalid spec: '%s'. "
e.message += "Package %s requires %s %s, but spec asked for %s"
- e.message %= (spec_deps[dep.name], dep.name, e.constraint_type,
- e.required, e.provided)
+ e.message %= (spec_deps[dep.name], dep.name,
+ e.constraint_type, e.required, e.provided)
raise e
# Add merged spec to my deps and recurse
dependency = spec_deps[dep.name]
- if dep.name not in self.dependencies:
- self._add_dependency(dependency)
+ if dep.name not in self._dependencies:
+ self._add_dependency(dependency, deptypes)
- changed |= dependency._normalize_helper(visited, spec_deps, provider_index)
+ changed |= dependency._normalize_helper(
+ visited, spec_deps, provider_index)
return changed
-
def _normalize_helper(self, visited, spec_deps, provider_index):
"""Recursive helper function for _normalize."""
if self.name in visited:
@@ -1164,7 +1750,7 @@ class Spec(object):
# if we descend into a virtual spec, there's nothing more
# to normalize. Concretize will finish resolving it later.
- if self.virtual or self.external:
+ if self.virtual or self.external or self.external_module:
return False
# Combine constraints from package deps with constraints from
@@ -1178,78 +1764,85 @@ class Spec(object):
for dep_name in pkg.dependencies:
# Do we depend on dep_name? If so pkg_dep is not None.
pkg_dep = self._evaluate_dependency_conditions(dep_name)
-
+ deptypes = pkg.dependency_types[dep_name]
# If pkg_dep is a dependency, merge it.
if pkg_dep:
changed |= self._merge_dependency(
- pkg_dep, visited, spec_deps, provider_index)
+ pkg_dep, deptypes, visited, spec_deps, provider_index)
any_change |= changed
return any_change
-
def normalize(self, force=False):
"""When specs are parsed, any dependencies specified are hanging off
the root, and ONLY the ones that were explicitly provided are there.
Normalization turns a partial flat spec into a DAG, where:
1. Known dependencies of the root package are in the DAG.
- 2. Each node's dependencies dict only contains its known direct deps.
+ 2. Each node's dependencies dict only contains its known direct
+ deps.
3. There is only ONE unique spec for each package in the DAG.
           * This includes virtual packages. If there is a non-virtual
package that provides a virtual package that is in the spec,
then we replace the virtual package with the non-virtual one.
- TODO: normalize should probably implement some form of cycle detection,
- to ensure that the spec is actually a DAG.
-
+ TODO: normalize should probably implement some form of cycle
+ detection, to ensure that the spec is actually a DAG.
"""
+ if not self.name:
+ raise SpecError("Attempting to normalize anonymous spec")
+
if self._normal and not force:
return False
+ # avoid any assumptions about concreteness when forced
+ if force:
+ self._mark_concrete(False)
+
# Ensure first that all packages & compilers in the DAG exist.
self.validate_names()
-
# Get all the dependencies into one DependencyMap
- spec_deps = self.flat_dependencies(copy=False)
+ spec_deps = self.flat_dependencies(copy=False, deptype_query=alldeps)
# Initialize index of virtual dependency providers if
# concretize didn't pass us one already
- provider_index = ProviderIndex(spec_deps.values(), restrict=True)
+ provider_index = ProviderIndex(
+ [s for s in spec_deps.values()], restrict=True)
# traverse the package DAG and fill out dependencies according
# to package files & their 'when' specs
visited = set()
+
any_change = self._normalize_helper(visited, spec_deps, provider_index)
# If there are deps specified but not visited, they're not
# actually deps of this package. Raise an error.
extra = set(spec_deps.keys()).difference(visited)
if extra:
- raise InvalidDependencyException(
+ raise InvalidDependencyError(
self.name + " does not depend on " + comma_or(extra))
# Mark the spec as normal once done.
self._normal = True
return any_change
-
def normalized(self):
- """Return a normalized copy of this spec without modifying this spec."""
+ """
+ Return a normalized copy of this spec without modifying this spec.
+ """
clone = self.copy()
clone.normalize()
return clone
-
def validate_names(self):
"""This checks that names of packages and compilers in this spec are real.
If they're not, it will raise either UnknownPackageError or
UnsupportedCompilerError.
"""
for spec in self.traverse():
- # Don't get a package for a virtual name.
- if not spec.virtual:
+ # raise an UnknownPackageError if the spec's package isn't real.
+ if (not spec.virtual) and spec.name:
spack.repo.get(spec.fullname)
# validate compiler in addition to the package name.
@@ -1262,7 +1855,6 @@ class Spec(object):
if vname not in spec.package_class.variants:
raise UnknownVariantError(spec.name, vname)
-
def constrain(self, other, deps=True):
"""Merge the constraints of other with self.
@@ -1270,26 +1862,37 @@ class Spec(object):
"""
other = self._autospec(other)
- if not self.name == other.name:
+ if not (self.name == other.name or
+ (not self.name) or
+ (not other.name)):
raise UnsatisfiableSpecNameError(self.name, other.name)
- if other.namespace is not None:
- if self.namespace is not None and other.namespace != self.namespace:
- raise UnsatisfiableSpecNameError(self.fullname, other.fullname)
+ if (other.namespace is not None and
+ self.namespace is not None and
+ other.namespace != self.namespace):
+ raise UnsatisfiableSpecNameError(self.fullname, other.fullname)
if not self.versions.overlaps(other.versions):
raise UnsatisfiableVersionSpecError(self.versions, other.versions)
for v in other.variants:
if (v in self.variants and
- self.variants[v].enabled != other.variants[v].enabled):
+ self.variants[v].value != other.variants[v].value):
raise UnsatisfiableVariantSpecError(self.variants[v],
other.variants[v])
- if self.architecture is not None and other.architecture is not None:
- if self.architecture != other.architecture:
- raise UnsatisfiableArchitectureSpecError(self.architecture,
- other.architecture)
+ # TODO: Check out the logic here
+ sarch, oarch = self.architecture, other.architecture
+ if sarch is not None and oarch is not None:
+ if sarch.platform is not None and oarch.platform is not None:
+ if sarch.platform != oarch.platform:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+ if sarch.platform_os is not None and oarch.platform_os is not None:
+ if sarch.platform_os != oarch.platform_os:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
+ if sarch.target is not None and oarch.target is not None:
+ if sarch.target != oarch.target:
+ raise UnsatisfiableArchitectureSpecError(sarch, oarch)
changed = False
if self.compiler is not None and other.compiler is not None:
@@ -1301,21 +1904,31 @@ class Spec(object):
changed |= self.versions.intersect(other.versions)
changed |= self.variants.constrain(other.variants)
- old = self.architecture
- self.architecture = self.architecture or other.architecture
- changed |= (self.architecture != old)
+ changed |= self.compiler_flags.constrain(other.compiler_flags)
+
+ old = str(self.architecture)
+ sarch, oarch = self.architecture, other.architecture
+ if sarch is None or other.architecture is None:
+ self.architecture = sarch or oarch
+ else:
+ if sarch.platform is None or oarch.platform is None:
+ self.architecture.platform = sarch.platform or oarch.platform
+ if sarch.platform_os is None or oarch.platform_os is None:
+ sarch.platform_os = sarch.platform_os or oarch.platform_os
+ if sarch.target is None or oarch.target is None:
+ sarch.target = sarch.target or oarch.target
+ changed |= (str(self.architecture) != old)
if deps:
changed |= self._constrain_dependencies(other)
return changed
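
A small sketch of constrain() in action (assuming the patched spack.spec is importable; libelf is just an example name):

from spack.spec import Spec

a = Spec('libelf@0.8.13')
assert a.constrain('libelf+debug')          # True: the 'debug' variant was added
assert a.variants['debug'].value is True
assert not a.constrain('libelf@0.8.13')     # already implied, nothing changes
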
-
def _constrain_dependencies(self, other):
"""Apply constraints of other spec's dependencies to this spec."""
other = self._autospec(other)
- if not self.dependencies or not other.dependencies:
+ if not self._dependencies or not other._dependencies:
return False
# TODO: might want more detail than this, e.g. specific deps
@@ -1329,31 +1942,31 @@ class Spec(object):
for name in self.common_dependencies(other):
changed |= self[name].constrain(other[name], deps=False)
-
# Update with additional constraints from other spec
for name in other.dep_difference(self):
- self._add_dependency(other[name].copy())
+ dep_spec_copy = other.get_dependency(name)
+ dep_copy = dep_spec_copy.spec
+ deptypes = dep_spec_copy.deptypes
+ self._add_dependency(dep_copy.copy(), deptypes)
changed = True
return changed
-
def common_dependencies(self, other):
"""Return names of dependencies that self an other have in common."""
+ # XXX(deptype): handle deptypes via deptype kwarg.
common = set(
s.name for s in self.traverse(root=False))
common.intersection_update(
s.name for s in other.traverse(root=False))
return common
-
def constrained(self, other, deps=True):
"""Return a constrained copy without modifying this spec."""
clone = self.copy(deps=deps)
clone.constrain(other, deps)
return clone
-
def dep_difference(self, other):
"""Returns dependencies in self that are not in other."""
mine = set(s.name for s in self.traverse(root=False))
@@ -1361,21 +1974,24 @@ class Spec(object):
s.name for s in other.traverse(root=False))
return mine
-
def _autospec(self, spec_like):
- """Used to convert arguments to specs. If spec_like is a spec, returns it.
- If it's a string, tries to parse a string. If that fails, tries to parse
- a local spec from it (i.e. name is assumed to be self's name).
+ """
+ Used to convert arguments to specs. If spec_like is a spec, returns
+ it. If it's a string, tries to parse a string. If that fails, tries
+ to parse a local spec from it (i.e. name is assumed to be self's name).
"""
if isinstance(spec_like, spack.spec.Spec):
return spec_like
try:
- return spack.spec.Spec(spec_like)
+ spec = spack.spec.Spec(spec_like)
+ if not spec.name:
+ raise SpecError(
+ "anonymous package -- this will always be handled")
+ return spec
except SpecError:
return parse_anonymous_spec(spec_like, self.name)
-
def satisfies(self, other, deps=True, strict=False):
"""Determine if this spec satisfies all constraints of another.
@@ -1391,25 +2007,30 @@ class Spec(object):
"""
other = self._autospec(other)
+ # The only way to satisfy a concrete spec is to match its hash exactly.
+ if other._concrete:
+ return self._concrete and self.dag_hash() == other.dag_hash()
+
# A concrete provider can satisfy a virtual dependency.
if not self.virtual and other.virtual:
pkg = spack.repo.get(self.fullname)
if pkg.provides(other.name):
- for provided, when_spec in pkg.provided.items():
- if self.satisfies(when_spec, deps=False, strict=strict):
+ for provided, when_specs in pkg.provided.items():
+ if any(self.satisfies(when_spec, deps=False, strict=strict)
+ for when_spec in when_specs):
if provided.satisfies(other):
return True
return False
# Otherwise, first thing we care about is whether the name matches
- if self.name != other.name:
+ if self.name != other.name and self.name and other.name:
return False
# namespaces either match, or other doesn't require one.
- if other.namespace is not None:
- if self.namespace is not None and self.namespace != other.namespace:
- return False
-
+ if (other.namespace is not None and
+ self.namespace is not None and
+ self.namespace != other.namespace):
+ return False
if self.versions and other.versions:
if not self.versions.satisfies(other.versions, strict=strict):
return False
@@ -1423,37 +2044,52 @@ class Spec(object):
elif strict and (other.compiler and not self.compiler):
return False
- if not self.variants.satisfies(other.variants, strict=strict):
+ var_strict = strict
+ if (not self.name) or (not other.name):
+ var_strict = True
+ if not self.variants.satisfies(other.variants, strict=var_strict):
return False
# Architecture satisfaction is currently just string equality.
# If not strict, None means unconstrained.
if self.architecture and other.architecture:
- if self.architecture != other.architecture:
+ if not self.architecture.satisfies(other.architecture, strict):
return False
elif strict and (other.architecture and not self.architecture):
return False
+ if not self.compiler_flags.satisfies(
+ other.compiler_flags,
+ strict=strict):
+ return False
+
# If we need to descend into dependencies, do it, otherwise we're done.
if deps:
- return self.satisfies_dependencies(other, strict=strict)
+ deps_strict = strict
+ if not (self.name and other.name):
+ deps_strict = True
+ return self.satisfies_dependencies(other, strict=deps_strict)
else:
return True
-
def satisfies_dependencies(self, other, strict=False):
- """This checks constraints on common dependencies against each other."""
+ """
+ This checks constraints on common dependencies against each other.
+ """
other = self._autospec(other)
if strict:
- if other.dependencies and not self.dependencies:
+ if other._dependencies and not self._dependencies:
return False
- if not all(dep in self.dependencies for dep in other.dependencies):
+ alldeps = set(d.name for d in self.traverse(root=False))
+ if not all(dep.name in alldeps
+ for dep in other.traverse(root=False)):
return False
- elif not self.dependencies or not other.dependencies:
- # if either spec doesn't restrict dependencies then both are compatible.
+ elif not self._dependencies or not other._dependencies:
+ # if either spec doesn't restrict dependencies then both are
+ # compatible.
return True
# Handle first-order constraints directly
@@ -1469,11 +2105,12 @@ class Spec(object):
if not self_index.satisfies(other_index):
return False
- # These two loops handle cases where there is an overly restrictive vpkg
- # in one spec for a provider in the other (e.g., mpi@3: is not compatible
- # with mpich2)
+ # These two loops handle cases where there is an overly restrictive
+ # vpkg in one spec for a provider in the other (e.g., mpi@3: is not
+ # compatible with mpich2)
for spec in self.virtual_dependencies():
- if spec.name in other_index and not other_index.providers_for(spec):
+ if (spec.name in other_index and
+ not other_index.providers_for(spec)):
return False
for spec in other.virtual_dependencies():
@@ -1482,13 +2119,11 @@ class Spec(object):
return True
-
def virtual_dependencies(self):
"""Return list of any virtual deps in this spec."""
return [spec for spec in self.traverse() if spec.virtual]
-
- def _dup(self, other, **kwargs):
+ def _dup(self, other, deps=True, cleardeps=True):
"""Copy the spec other into self. This is an overwriting
copy. It does not copy any dependents (parents), but by default
copies dependencies.
@@ -1497,67 +2132,107 @@ class Spec(object):
Options:
dependencies[=True]
- Whether deps should be copied too. Set to false to copy a
+ Whether deps should be copied too. Set to False to copy a
spec but not its dependencies.
"""
# We don't count dependencies as changes here
changed = True
if hasattr(self, 'name'):
- changed = (self.name != other.name and self.versions != other.versions and
- self.architecture != other.architecture and self.compiler != other.compiler and
- self.variants != other.variants and self._normal != other._normal and
- self.concrete != other.concrete and self.external != other.external)
+ changed = (self.name != other.name and
+ self.versions != other.versions and
+ self.architecture != other.architecture and
+ self.compiler != other.compiler and
+ self.variants != other.variants and
+ self._normal != other._normal and
+ self.concrete != other.concrete and
+ self.external != other.external and
+ self.external_module != other.external_module and
+ self.compiler_flags != other.compiler_flags)
# Local node attributes get copied first.
self.name = other.name
self.versions = other.versions.copy()
- self.architecture = other.architecture
+ self.architecture = other.architecture.copy() if other.architecture \
+ else None
self.compiler = other.compiler.copy() if other.compiler else None
- if kwargs.get('cleardeps', True):
- self.dependents = DependencyMap()
- self.dependencies = DependencyMap()
+ if cleardeps:
+ self._dependents = DependencyMap()
+ self._dependencies = DependencyMap()
+ self.compiler_flags = other.compiler_flags.copy()
self.variants = other.variants.copy()
self.variants.spec = self
self.external = other.external
+ self.external_module = other.external_module
self.namespace = other.namespace
- # If we copy dependencies, preserve DAG structure in the new spec
- if kwargs.get('deps', True):
- # This copies the deps from other using _dup(deps=False)
- new_nodes = other.flat_dependencies()
- new_nodes[self.name] = self
-
- # Hook everything up properly here by traversing.
- for spec in other.traverse(cover='nodes'):
- parent = new_nodes[spec.name]
- for child in spec.dependencies:
- if child not in parent.dependencies:
- parent._add_dependency(new_nodes[child])
-
- # Since we preserved structure, we can copy _normal safely.
- self._normal = other._normal
- self._concrete = other._concrete
self.external = other.external
+ self.external_module = other.external_module
+
+ # If we copy dependencies, preserve DAG structure in the new spec
+ if deps:
+ deptypes = alldeps # by default copy all deptypes
+
+ # if caller restricted deptypes to be copied, adjust that here.
+ if isinstance(deps, (tuple, list)):
+ deptypes = deps
+
+ self._dup_deps(other, deptypes)
+
+ # These fields are all cached results of expensive operations.
+ # If we preserved the original structure, we can copy them
+ # safely. If not, they need to be recomputed.
+ if deps is True or deps == alldeps:
+ self._hash = other._hash
+ self._cmp_key_cache = other._cmp_key_cache
+ self._normal = other._normal
+ self._concrete = other._concrete
+ else:
+ self._hash = None
+ self._cmp_key_cache = None
+ self._normal = False
+ self._concrete = False
+
return changed
+ def _dup_deps(self, other, deptypes):
+ new_specs = {self.name: self}
+ for dspec in other.traverse_edges(cover='edges', root=False):
+ if (dspec.deptypes and
+ not any(d in deptypes for d in dspec.deptypes)):
+ continue
+
+ if dspec.parent.name not in new_specs:
+ new_specs[dspec.parent.name] = dspec.parent.copy(deps=False)
+ if dspec.spec.name not in new_specs:
+ new_specs[dspec.spec.name] = dspec.spec.copy(deps=False)
- def copy(self, **kwargs):
+ new_specs[dspec.parent.name]._add_dependency(
+ new_specs[dspec.spec.name], dspec.deptypes)
+
+ def copy(self, deps=True):
"""Return a copy of this spec.
- By default, returns a deep copy. Supply dependencies=False
- to get a shallow copy.
+
+ By default, returns a deep copy. To control how dependencies are
+ copied, supply:
+
+ deps=True: deep copy
+
+ deps=False: shallow copy (no dependencies)
+
+ deps=('link', 'build'):
+ only build and link dependencies. Similar for other deptypes.
+
"""
clone = Spec.__new__(Spec)
- clone._dup(self, **kwargs)
+ clone._dup(self, deps=deps)
return clone
-
@property
def version(self):
if not self.versions.concrete:
raise SpecError("Spec version is not concrete: " + str(self))
return self.versions[0]
-
def __getitem__(self, name):
"""Get a dependency from the spec by its name."""
for spec in self.traverse():
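
A short sketch of the deps argument to copy() documented above; the package names are hypothetical:

    s = Spec('mpileaks ^callpath ^mpich')   # hypothetical packages
    full = s.copy()                         # deps=True: deep copy of the DAG
    node = s.copy(deps=False)               # this node only, no dependencies
    bl = s.copy(deps=('build', 'link'))     # keep only build and link edges
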
@@ -1576,9 +2251,8 @@ class Spec(object):
raise KeyError("No spec with name %s in %s" % (name, self))
-
def __contains__(self, spec):
- """True if this spec satisfis the provided spec, or if any dependency
+ """True if this spec satisfies the provided spec, or if any dependency
does. If the spec has no name, then we parse this one first.
"""
spec = self._autospec(spec)
@@ -1588,14 +2262,12 @@ class Spec(object):
return False
-
def sorted_deps(self):
"""Return a list of all dependencies sorted by name."""
deps = self.flat_dependencies()
return tuple(deps[name] for name in sorted(deps))
-
- def _eq_dag(self, other, vs, vo):
+ def _eq_dag(self, other, vs, vo, deptypes):
"""Recursive helper for eq_dag and ne_dag. Does the actual DAG
traversal."""
vs.add(id(self))
@@ -1604,38 +2276,43 @@ class Spec(object):
if self.ne_node(other):
return False
- if len(self.dependencies) != len(other.dependencies):
+ if len(self._dependencies) != len(other._dependencies):
return False
- ssorted = [self.dependencies[name] for name in sorted(self.dependencies)]
- osorted = [other.dependencies[name] for name in sorted(other.dependencies)]
+ ssorted = [self._dependencies[name]
+ for name in sorted(self._dependencies)]
+ osorted = [other._dependencies[name]
+ for name in sorted(other._dependencies)]
- for s, o in zip(ssorted, osorted):
+ for s_dspec, o_dspec in zip(ssorted, osorted):
+ if deptypes and s_dspec.deptypes != o_dspec.deptypes:
+ return False
+
+ s, o = s_dspec.spec, o_dspec.spec
visited_s = id(s) in vs
visited_o = id(o) in vo
# Check for duplicate or non-equal dependencies
- if visited_s != visited_o: return False
+ if visited_s != visited_o:
+ return False
# Skip visited nodes
- if visited_s or visited_o: continue
+ if visited_s or visited_o:
+ continue
# Recursive check for equality
- if not s._eq_dag(o, vs, vo):
+ if not s._eq_dag(o, vs, vo, deptypes):
return False
return True
+ def eq_dag(self, other, deptypes=True):
+ """True if the full dependency DAGs of specs are equal."""
+ return self._eq_dag(other, set(), set(), deptypes)
- def eq_dag(self, other):
- """True if the full dependency DAGs of specs are equal"""
- return self._eq_dag(other, set(), set())
-
-
- def ne_dag(self, other):
- """True if the full dependency DAGs of specs are not equal"""
- return not self.eq_dag(other)
-
+ def ne_dag(self, other, deptypes=True):
+ """True if the full dependency DAGs of specs are not equal."""
+        return not self.eq_dag(other, deptypes)
def _cmp_node(self):
"""Comparison key for just *this node* and not its deps."""
@@ -1644,19 +2321,17 @@ class Spec(object):
self.versions,
self.variants,
self.architecture,
- self.compiler)
-
+ self.compiler,
+ self.compiler_flags)
def eq_node(self, other):
"""Equality with another spec, not including dependencies."""
return self._cmp_node() == other._cmp_node()
-
def ne_node(self, other):
"""Inequality with another spec, not including dependencies."""
return self._cmp_node() != other._cmp_node()
-
def _cmp_key(self):
"""This returns a key for the spec *including* DAG structure.
@@ -1664,56 +2339,73 @@ class Spec(object):
1. A tuple describing this node in the DAG.
2. The hash of each of this node's dependencies' cmp_keys.
"""
- return self._cmp_node() + (
- tuple(hash(self.dependencies[name])
- for name in sorted(self.dependencies)),)
+ if self._cmp_key_cache:
+ return self._cmp_key_cache
+ dep_tuple = tuple(
+ (d.spec.name, hash(d.spec), tuple(sorted(d.deptypes)))
+ for name, d in sorted(self._dependencies.items()))
+
+ key = (self._cmp_node(), dep_tuple)
+ if self._concrete:
+ self._cmp_key_cache = key
+ return key
def colorized(self):
return colorize_spec(self)
-
- def format(self, format_string='$_$@$%@$+$=', **kwargs):
- """Prints out particular pieces of a spec, depending on what is
- in the format string. The format strings you can provide are::
-
- $_ Package name
- $. Full package name (with namespace)
- $@ Version with '@' prefix
- $% Compiler with '%' prefix
- $%@ Compiler with '%' prefix & compiler version with '@' prefix
- $+ Options
- $= Architecture with '=' prefix
- $# 7-char prefix of DAG hash with '-' prefix
- $$ $
-
- You can also use full-string versions, which leave off the prefixes:
-
- ${PACKAGE} Package name
- ${VERSION} Version
- ${COMPILER} Full compiler string
- ${COMPILERNAME} Compiler name
- ${COMPILERVER} Compiler version
- ${OPTIONS} Options
- ${ARCHITECTURE} Architecture
- ${SHA1} Dependencies 8-char sha1 prefix
-
- ${SPACK_ROOT} The spack root directory
- ${SPACK_INSTALL} The default spack install directory, ${SPACK_PREFIX}/opt
-
- Optionally you can provide a width, e.g. $20_ for a 20-wide name.
- Like printf, you can provide '-' for left justification, e.g.
- $-20_ for a left-justified name.
-
- Anything else is copied verbatim into the output stream.
-
- *Example:* ``$_$@$+`` translates to the name, version, and options
- of the package, but no dependencies, arch, or compiler.
-
- TODO: allow, e.g., $6# to customize short hash length
- TODO: allow, e.g., $## for full hash.
- """
- color = kwargs.get('color', False)
+ def format(self, format_string='$_$@$%@+$+$=', **kwargs):
+ """
+ Prints out particular pieces of a spec, depending on what is
+ in the format string. The format strings you can provide are::
+
+ $_ Package name
+ $. Full package name (with namespace)
+ $@ Version with '@' prefix
+ $% Compiler with '%' prefix
+ $%@ Compiler with '%' prefix & compiler version with '@' prefix
+ $%+ Compiler with '%' prefix & compiler flags prefixed by name
+ $%@+ Compiler, compiler version, and compiler flags with same
+ prefixes as above
+ $+ Options
+ $= Architecture prefixed by 'arch='
+ $# 7-char prefix of DAG hash with '-' prefix
+ $$ $
+
+ You can also use full-string versions, which elide the prefixes::
+
+ ${PACKAGE} Package name
+ ${VERSION} Version
+ ${COMPILER} Full compiler string
+ ${COMPILERNAME} Compiler name
+ ${COMPILERVER} Compiler version
+ ${COMPILERFLAGS} Compiler flags
+ ${OPTIONS} Options
+ ${ARCHITECTURE} Architecture
+ ${SHA1} Dependencies 8-char sha1 prefix
+ ${HASH:len} DAG hash with optional length specifier
+
+ ${SPACK_ROOT} The spack root directory
+ ${SPACK_INSTALL} The default spack install directory,
+ ${SPACK_PREFIX}/opt
+ ${PREFIX} The package prefix
+
+ Note these are case-insensitive: for example you can specify either
+ ``${PACKAGE}`` or ``${package}``.
+
+ Optionally you can provide a width, e.g. ``$20_`` for a 20-wide name.
+ Like printf, you can provide '-' for left justification, e.g.
+ ``$-20_`` for a left-justified name.
+
+ Anything else is copied verbatim into the output stream.
+
+ *Example:* ``$_$@$+`` translates to the name, version, and options
+ of the package, but no dependencies, arch, or compiler.
+
+ TODO: allow, e.g., ``$6#`` to customize short hash length
+ TODO: allow, e.g., ``$##`` for full hash.
+ """
+ color = kwargs.get('color', False)
length = len(format_string)
out = StringIO()
named = escape = compiler = False
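
A hedged sketch of the format tokens documented above; the spec string is illustrative and the commented output is approximate, not captured from a run:

    s = Spec('libelf@0.8.13%gcc@4.9.3+debug')
    s.format('$_$@$%@+$+$=')
    # -> roughly 'libelf@0.8.13%gcc@4.9.3+debug' (no arch set, so $= is empty)
    s.format('${PACKAGE}-${VERSION}-${HASH:7}')
    # -> roughly 'libelf-0.8.13-abcdefg' (the hash shown here is made up)
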
@@ -1740,7 +2432,8 @@ class Spec(object):
fmt += 's'
if c == '_':
- out.write(fmt % self.name)
+ name = self.name if self.name else ''
+ out.write(fmt % name)
elif c == '.':
out.write(fmt % self.fullname)
elif c == '@':
@@ -1754,8 +2447,9 @@ class Spec(object):
if self.variants:
write(fmt % str(self.variants), c)
elif c == '=':
- if self.architecture:
- write(fmt % (c + str(self.architecture)), c)
+ if self.architecture and str(self.architecture):
+ a_str = ' arch' + c + str(self.architecture) + ' '
+ write(fmt % (a_str), c)
elif c == '#':
out.write('-' + fmt % (self.dag_hash(7)))
elif c == '$':
@@ -1770,22 +2464,29 @@ class Spec(object):
elif compiler:
if c == '@':
if (self.compiler and self.compiler.versions and
- self.compiler.versions != _any_version):
+ self.compiler.versions != _any_version):
write(c + str(self.compiler.versions), '%')
+ elif c == '+':
+ if self.compiler_flags:
+ write(fmt % str(self.compiler_flags), '%')
+ compiler = False
elif c == '$':
escape = True
+ compiler = False
else:
out.write(c)
- compiler = False
+ compiler = False
elif named:
if not c == '}':
if i == length - 1:
- raise ValueError("Error: unterminated ${ in format: '%s'"
- % format_string)
+                        raise ValueError("Error: unterminated ${ in format:"
+                                         " '%s'" % format_string)
named_str += c
- continue;
+ continue
+ named_str = named_str.upper()
if named_str == 'PACKAGE':
+                name = self.name if self.name else ''
-                write(fmt % self.name, '@')
+                write(fmt % name, '@')
if named_str == 'VERSION':
if self.versions and self.versions != _any_version:
@@ -1796,22 +2497,34 @@ class Spec(object):
elif named_str == 'COMPILERNAME':
if self.compiler:
write(fmt % self.compiler.name, '%')
- elif named_str == 'COMPILERVER':
+ elif named_str in ['COMPILERVER', 'COMPILERVERSION']:
if self.compiler:
write(fmt % self.compiler.versions, '%')
+ elif named_str == 'COMPILERFLAGS':
+ if self.compiler:
+ write(fmt % str(self.compiler_flags), '%')
elif named_str == 'OPTIONS':
if self.variants:
write(fmt % str(self.variants), '+')
elif named_str == 'ARCHITECTURE':
- if self.architecture:
- write(fmt % str(self.architecture), '=')
+ if self.architecture and str(self.architecture):
+ write(fmt % str(self.architecture) + ' ', ' arch=')
elif named_str == 'SHA1':
if self.dependencies:
out.write(fmt % str(self.dag_hash(7)))
elif named_str == 'SPACK_ROOT':
out.write(fmt % spack.prefix)
elif named_str == 'SPACK_INSTALL':
- out.write(fmt % spack.install_path)
+ out.write(fmt % spack.store.root)
+ elif named_str == 'PREFIX':
+ out.write(fmt % self.prefix)
+ elif named_str.startswith('HASH'):
+ if named_str.startswith('HASH:'):
+ _, hashlen = named_str.split(':')
+ hashlen = int(hashlen)
+ else:
+ hashlen = None
+ out.write(fmt % (self.dag_hash(hashlen)))
named = False
@@ -1826,48 +2539,113 @@ class Spec(object):
result = out.getvalue()
return result
-
def dep_string(self):
return ''.join("^" + dep.format() for dep in self.sorted_deps())
+ def __cmp__(self, other):
+ from package_prefs import pkgsort
+
+ # Package name sort order is not configurable, always goes alphabetical
+ if self.name != other.name:
+ return cmp(self.name, other.name)
+
+ # Package version is second in compare order
+ pkgname = self.name
+ if self.versions != other.versions:
+ return pkgsort().version_compare(
+ pkgname, self.versions, other.versions)
+
+ # Compiler is third
+ if self.compiler != other.compiler:
+ return pkgsort().compiler_compare(
+ pkgname, self.compiler, other.compiler)
+
+ # Variants
+ if self.variants != other.variants:
+ return pkgsort().variant_compare(
+ pkgname, self.variants, other.variants)
+
+ # Target
+ if self.architecture != other.architecture:
+ return pkgsort().architecture_compare(
+ pkgname, self.architecture, other.architecture)
+
+ # Dependency is not configurable
+ if self._dependencies != other._dependencies:
+ return -1 if self._dependencies < other._dependencies else 1
+
+ # Equal specs
+ return 0
def __str__(self):
- return self.format() + self.dep_string()
+ ret = self.format() + self.dep_string()
+ return ret.strip()
+ def _install_status(self):
+ """Helper for tree to print DB install status."""
+ if not self.concrete:
+ return None
+ try:
+ record = spack.store.db.get_record(self)
+ return record.installed
+ except KeyError:
+ return None
def tree(self, **kwargs):
"""Prints out this spec and its dependencies, tree-formatted
with indentation."""
- color = kwargs.pop('color', False)
- depth = kwargs.pop('depth', False)
- showid = kwargs.pop('ids', False)
- cover = kwargs.pop('cover', 'nodes')
+ color = kwargs.pop('color', False)
+ depth = kwargs.pop('depth', False)
+ hashes = kwargs.pop('hashes', False)
+ hlen = kwargs.pop('hashlen', None)
+ install_status = kwargs.pop('install_status', False)
+ cover = kwargs.pop('cover', 'nodes')
indent = kwargs.pop('indent', 0)
- fmt = kwargs.pop('format', '$_$@$%@$+$=')
+ fmt = kwargs.pop('format', '$_$@$%@+$+$=')
prefix = kwargs.pop('prefix', None)
+ show_types = kwargs.pop('show_types', False)
+ deptypes = kwargs.pop('deptypes', ('build', 'link'))
check_kwargs(kwargs, self.tree)
out = ""
- cur_id = 0
- ids = {}
- for d, node in self.traverse(order='pre', cover=cover, depth=True):
+ for d, dep_spec in self.traverse_edges(
+ order='pre', cover=cover, depth=True, deptypes=deptypes):
+ node = dep_spec.spec
+
if prefix is not None:
out += prefix(node)
out += " " * indent
+
if depth:
out += "%-4d" % d
- if not id(node) in ids:
- cur_id += 1
- ids[id(node)] = cur_id
- if showid:
- out += "%-4d" % ids[id(node)]
+
+ if install_status:
+ status = node._install_status()
+ if status is None:
+ out += " " # Package isn't installed
+ elif status:
+ out += colorize("@g{[+]} ", color=color) # installed
+ else:
+ out += colorize("@r{[-]} ", color=color) # missing
+
+ if hashes:
+ out += colorize('@K{%s} ', color=color) % node.dag_hash(hlen)
+
+ if show_types:
+ out += '['
+ if dep_spec.deptypes:
+ for t in alldeps:
+ out += ''.join(t[0] if t in dep_spec.deptypes else ' ')
+ else:
+ out += ' ' * len(alldeps)
+ out += '] '
+
out += (" " * d)
if d > 0:
out += "^"
out += node.format(fmt, color=color) + "\n"
return out
-
def __repr__(self):
return str(self)
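
A hedged sketch of the new tree() options added above (install_status, hashes, show_types); the package is just an example and the output shape is described, not reproduced:

    s = Spec('libdwarf')   # any package with dependencies will do
    s.concretize()
    print(s.tree(hashes=True, hashlen=7,
                 show_types=True, install_status=True))
    # Each line would carry an install marker ([+] or [-]), a 7-char hash,
    # a deptype column such as [bl  ], and the usual indented spec format.
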
@@ -1875,68 +2653,151 @@ class Spec(object):
#
# These are possible token types in the spec grammar.
#
-DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID = range(9)
+HASH, DEP, AT, COLON, COMMA, ON, OFF, PCT, EQ, ID, VAL = range(11)
+
class SpecLexer(spack.parse.Lexer):
+
"""Parses tokens that make up spack specs."""
+
def __init__(self):
super(SpecLexer, self).__init__([
- (r'\^', lambda scanner, val: self.token(DEP, val)),
- (r'\@', lambda scanner, val: self.token(AT, val)),
- (r'\:', lambda scanner, val: self.token(COLON, val)),
- (r'\,', lambda scanner, val: self.token(COMMA, val)),
- (r'\+', lambda scanner, val: self.token(ON, val)),
- (r'\-', lambda scanner, val: self.token(OFF, val)),
- (r'\~', lambda scanner, val: self.token(OFF, val)),
- (r'\%', lambda scanner, val: self.token(PCT, val)),
- (r'\=', lambda scanner, val: self.token(EQ, val)),
+ (r'/', lambda scanner, val: self.token(HASH, val)),
+ (r'\^', lambda scanner, val: self.token(DEP, val)),
+ (r'\@', lambda scanner, val: self.token(AT, val)),
+ (r'\:', lambda scanner, val: self.token(COLON, val)),
+ (r'\,', lambda scanner, val: self.token(COMMA, val)),
+ (r'\+', lambda scanner, val: self.token(ON, val)),
+ (r'\-', lambda scanner, val: self.token(OFF, val)),
+ (r'\~', lambda scanner, val: self.token(OFF, val)),
+ (r'\%', lambda scanner, val: self.token(PCT, val)),
+ (r'\=', lambda scanner, val: self.token(EQ, val)),
# This is more liberal than identifier_re (see above).
# Checked by check_identifier() for better error messages.
(r'\w[\w.-]*', lambda scanner, val: self.token(ID, val)),
- (r'\s+', lambda scanner, val: None)])
+ (r'\s+', lambda scanner, val: None)],
+ [EQ],
+ [(r'[\S].*', lambda scanner, val: self.token(VAL, val)),
+ (r'\s+', lambda scanner, val: None)],
+ [VAL])
+
+
+# Lexer is always the same for every parser.
+_lexer = SpecLexer()
class SpecParser(spack.parse.Parser):
- def __init__(self):
- super(SpecParser, self).__init__(SpecLexer())
+ def __init__(self):
+ super(SpecParser, self).__init__(_lexer)
+ self.previous = None
def do_parse(self):
specs = []
try:
- while self.next:
- if self.accept(ID):
- specs.append(self.spec())
+ while self.next or self.previous:
+ # TODO: clean this parsing up a bit
+ if self.previous:
+ # We picked up the name of this spec while finishing the
+ # previous spec
+ specs.append(self.spec(self.previous.value))
+ self.previous = None
+ elif self.accept(ID):
+ self.previous = self.token
+ if self.accept(EQ):
+ # We're either parsing an anonymous spec beginning
+ # with a key-value pair or adding a key-value pair
+ # to the last spec
+ if not specs:
+ specs.append(self.spec(None))
+ self.expect(VAL)
+ specs[-1]._add_flag(
+ self.previous.value, self.token.value)
+ self.previous = None
+ else:
+ # We're parsing a new spec by name
+ value = self.previous.value
+ self.previous = None
+ specs.append(self.spec(value))
+ elif self.accept(HASH):
+ # We're finding a spec by hash
+ specs.append(self.spec_by_hash())
elif self.accept(DEP):
if not specs:
- self.last_token_error("Dependency has no package")
- self.expect(ID)
- specs[-1]._add_dependency(self.spec())
+ # We're parsing an anonymous spec beginning with a
+ # dependency
+ self.previous = self.token
+ specs.append(self.spec(None))
+ self.previous = None
+ if self.accept(HASH):
+ # We're finding a dependency by hash for an anonymous
+ # spec
+ dep = self.spec_by_hash()
+ else:
+ # We're adding a dependency to the last spec
+ self.expect(ID)
+ dep = self.spec(self.token.value)
+
+ # command line deps get empty deptypes now.
+ # Real deptypes are assigned later per packages.
+ specs[-1]._add_dependency(dep, ())
else:
- self.unexpected_token()
- except spack.parse.ParseError, e:
+ # If the next token can be part of a valid anonymous spec,
+ # create the anonymous spec
+ if self.next.type in (AT, ON, OFF, PCT):
+ specs.append(self.spec(None))
+ else:
+ self.unexpected_token()
+
+ except spack.parse.ParseError as e:
raise SpecParseError(e)
+ # If the spec has an os or a target and no platform, give it
+ # the default platform
+ platform_default = spack.architecture.platform().name
+ for spec in specs:
+ for s in spec.traverse():
+ if s.architecture and not s.architecture.platform and \
+ (s.architecture.platform_os or s.architecture.target):
+ s._set_architecture(platform=platform_default)
return specs
-
def parse_compiler(self, text):
self.setup(text)
return self.compiler()
+ def spec_by_hash(self):
+ self.expect(ID)
+
+ specs = spack.store.db.query()
+ matches = [spec for spec in specs if
+ spec.dag_hash()[:len(self.token.value)] == self.token.value]
+
+ if not matches:
+ tty.die("%s does not match any installed packages." %
+ self.token.value)
- def spec(self):
+ if len(matches) != 1:
+ raise AmbiguousHashError(
+ "Multiple packages specify hash %s." % self.token.value,
+ *matches)
+
+ return matches[0]
+
+ def spec(self, name):
"""Parse a spec out of the input. If a spec is supplied, then initialize
and return it instead of creating a new one."""
-
- spec_namespace, dot, spec_name = self.token.value.rpartition('.')
- if not spec_namespace:
+ if name:
+ spec_namespace, dot, spec_name = name.rpartition('.')
+ if not spec_namespace:
+ spec_namespace = None
+ self.check_identifier(spec_name)
+ else:
spec_namespace = None
-
- self.check_identifier(spec_name)
+ spec_name = None
# This will init the spec without calling __init__.
spec = Spec.__new__(Spec)
@@ -1946,9 +2807,13 @@ class SpecParser(spack.parse.Parser):
spec.architecture = None
spec.compiler = None
spec.external = None
- spec.dependents = DependencyMap()
- spec.dependencies = DependencyMap()
+ spec.external_module = None
+ spec.compiler_flags = FlagMap(spec)
+ spec._dependents = DependencyMap()
+ spec._dependencies = DependencyMap()
spec.namespace = spec_namespace
+ spec._hash = None
+ spec._cmp_key_cache = None
spec._normal = False
spec._concrete = False
@@ -1957,6 +2822,16 @@ class SpecParser(spack.parse.Parser):
# unspecified or not.
added_version = False
+ if self.previous and self.previous.value == DEP:
+ if self.accept(HASH):
+ spec.add_dependency(self.spec_by_hash())
+ else:
+ self.expect(ID)
+ if self.accept(EQ):
+ raise SpecParseError(spack.parse.ParseError(
+                        "", "", "Expected dependency, received anonymous spec"))
+ spec.add_dependency(self.spec(self.token.value))
+
while self.next:
if self.accept(AT):
vlist = self.version_list()
@@ -1973,8 +2848,16 @@ class SpecParser(spack.parse.Parser):
elif self.accept(PCT):
spec._set_compiler(self.compiler())
- elif self.accept(EQ):
- spec._set_architecture(self.architecture())
+ elif self.accept(ID):
+ self.previous = self.token
+ if self.accept(EQ):
+ # We're adding a key-value pair to the spec
+ self.expect(VAL)
+ spec._add_flag(self.previous.value, self.token.value)
+ self.previous = None
+ else:
+ # We've found the start of a new spec. Go back to do_parse
+ break
else:
break
@@ -1985,17 +2868,14 @@ class SpecParser(spack.parse.Parser):
return spec
-
- def variant(self):
- self.expect(ID)
- self.check_identifier()
- return self.token.value
-
-
- def architecture(self):
- self.expect(ID)
- return self.token.value
-
+ def variant(self, name=None):
+ # TODO: Make generalized variants possible
+ if name:
+ return name
+ else:
+ self.expect(ID)
+ self.check_identifier()
+ return self.token.value
def version(self):
start = None
@@ -2013,11 +2893,12 @@ class SpecParser(spack.parse.Parser):
# No colon and no id: invalid version.
self.next_token_error("Invalid version specifier")
- if start: start = Version(start)
- if end: end = Version(end)
+ if start:
+ start = Version(start)
+ if end:
+ end = Version(end)
return VersionRange(start, end)
-
def version_list(self):
vlist = []
vlist.append(self.version())
@@ -2025,7 +2906,6 @@ class SpecParser(spack.parse.Parser):
vlist.append(self.version())
return vlist
-
def compiler(self):
self.expect(ID)
self.check_identifier()
@@ -2041,7 +2921,6 @@ class SpecParser(spack.parse.Parser):
compiler.versions = VersionList(':')
return compiler
-
def check_identifier(self, id=None):
"""The only identifiers that can contain '.' are versions, but version
ids are context-sensitive so we have to check on a case-by-case
@@ -2074,9 +2953,16 @@ def parse_anonymous_spec(spec_like, pkg_name):
if isinstance(spec_like, str):
try:
anon_spec = Spec(spec_like)
+ if anon_spec.name != pkg_name:
+ raise SpecParseError(spack.parse.ParseError(
+ "",
+ "",
+                "Expected anonymous spec for package %s but found spec for "
+ "package %s" % (pkg_name, anon_spec.name)))
except SpecParseError:
- anon_spec = Spec(pkg_name + spec_like)
- if anon_spec.name != pkg_name: raise ValueError(
+ anon_spec = Spec(pkg_name + ' ' + spec_like)
+ if anon_spec.name != pkg_name:
+ raise ValueError(
"Invalid spec for package %s: %s" % (pkg_name, spec_like))
else:
anon_spec = spec_like.copy()
@@ -2088,10 +2974,18 @@ def parse_anonymous_spec(spec_like, pkg_name):
return anon_spec
+def base32_prefix_bits(hash_string, bits):
+ """Return the first <bits> bits of a base32 string as an integer."""
+ if bits > len(hash_string) * 5:
+ raise ValueError("Too many bits! Requested %d bit prefix of '%s'."
+ % (bits, hash_string))
+
+ hash_bytes = base64.b32decode(hash_string, casefold=True)
+ return prefix_bits(hash_bytes, bits)
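
For example, the helper can turn the leading bits of a DAG hash into a small integer (the hash string below is made up):

    # first 10 bits of a (hypothetical) base32 DAG hash, as an integer
    lock_id = base32_prefix_bits('es6sqxtlvmc7w3dbj7nx4acyl5ptu3gj', 10)
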
+
+
class SpecError(spack.error.SpackError):
"""Superclass for all errors that occur while constructing specs."""
- def __init__(self, message):
- super(SpecError, self).__init__(message)
class SpecParseError(SpecError):
@@ -2104,20 +2998,14 @@ class SpecParseError(SpecError):
class DuplicateDependencyError(SpecError):
"""Raised when the same dependency occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateDependencyError, self).__init__(message)
class DuplicateVariantError(SpecError):
"""Raised when the same variant occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateVariantError, self).__init__(message)
class DuplicateCompilerSpecError(SpecError):
"""Raised when the same compiler occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateCompilerSpecError, self).__init__(message)
class UnsupportedCompilerError(SpecError):
@@ -2136,22 +3024,20 @@ class UnknownVariantError(SpecError):
class DuplicateArchitectureError(SpecError):
"""Raised when the same architecture occurs in a spec twice."""
- def __init__(self, message):
- super(DuplicateArchitectureError, self).__init__(message)
class InconsistentSpecError(SpecError):
"""Raised when two nodes in the same spec DAG have inconsistent
constraints."""
- def __init__(self, message):
- super(InconsistentSpecError, self).__init__(message)
-class InvalidDependencyException(SpecError):
+class InvalidDependencyError(SpecError):
"""Raised when a dependency in a spec is not actually a dependency
of the package."""
- def __init__(self, message):
- super(InvalidDependencyException, self).__init__(message)
+
+
+class InvalidDependencyTypeError(SpecError):
+ """Raised when a dependency type is not a legal Spack dep type."""
class NoProviderError(SpecError):
@@ -2216,6 +3102,13 @@ class UnsatisfiableVariantSpecError(UnsatisfiableSpecError):
provided, required, "variant")
+class UnsatisfiableCompilerFlagSpecError(UnsatisfiableSpecError):
+    """Raised when spec compiler flags conflict with package constraints."""
+ def __init__(self, provided, required):
+ super(UnsatisfiableCompilerFlagSpecError, self).__init__(
+ provided, required, "compiler_flags")
+
+
class UnsatisfiableArchitectureSpecError(UnsatisfiableSpecError):
"""Raised when a spec architecture conflicts with package constraints."""
def __init__(self, provided, required):
@@ -2230,6 +3123,7 @@ class UnsatisfiableProviderSpecError(UnsatisfiableSpecError):
super(UnsatisfiableProviderSpecError, self).__init__(
provided, required, "provider")
+
# TODO: get rid of this and be more specific about particular incompatible
# dep constraints
class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
@@ -2238,6 +3132,9 @@ class UnsatisfiableDependencySpecError(UnsatisfiableSpecError):
super(UnsatisfiableDependencySpecError, self).__init__(
provided, required, "dependency")
-class SpackYAMLError(spack.error.SpackError):
- def __init__(self, msg, yaml_error):
- super(SpackYAMLError, self).__init__(msg, str(yaml_error))
+
+class AmbiguousHashError(SpecError):
+ def __init__(self, msg, *specs):
+ super(AmbiguousHashError, self).__init__(msg)
+ for spec in specs:
+ print(' ', spec.format('$.$@$%@+$+$=$#'))
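
The reworked lexer and parser above extend the spec grammar with key=value pairs and hash references. A rough sketch of what they are meant to accept; the values are hypothetical, and the hash form assumes a matching installed package:

    from spack.spec import Spec

    Spec('libelf os=CNL10')        # key=value pair sets the OS field
    Spec('libelf cppflags=-O3')    # compiler flag stored in the FlagMap
    Spec('/abc1234')               # installed spec looked up by hash prefix
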
diff --git a/lib/spack/spack/stage.py b/lib/spack/spack/stage.py
index a76ec168ad..91f77839d8 100644
--- a/lib/spack/spack/stage.py
+++ b/lib/spack/spack/stage.py
@@ -23,12 +23,16 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
+import sys
import errno
+import hashlib
import shutil
import tempfile
+import getpass
from urlparse import urljoin
import llnl.util.tty as tty
+import llnl.util.lock
from llnl.util.filesystem import *
import spack.util.pattern as pattern
@@ -37,8 +41,73 @@ import spack
import spack.config
import spack.fetch_strategy as fs
import spack.error
+from spack.version import *
+from spack.util.path import canonicalize_path
+from spack.util.crypto import prefix_bits, bit_length
-STAGE_PREFIX = 'spack-stage-'
+_stage_prefix = 'spack-stage-'
+
+
+def _first_accessible_path(paths):
+ """Find a tmp dir that exists that we can access."""
+ for path in paths:
+ try:
+ # try to create the path if it doesn't exist.
+ path = canonicalize_path(path)
+ mkdirp(path)
+
+ # ensure accessible
+ if not can_access(path):
+ continue
+
+ # return it if successful.
+ return path
+
+ except OSError:
+ tty.debug('OSError while checking temporary path: %s' % path)
+ continue
+
+ return None
+
+
+# cached temporary root
+_tmp_root = None
+_use_tmp_stage = True
+
+
+def get_tmp_root():
+ global _tmp_root, _use_tmp_stage
+
+ if not _use_tmp_stage:
+ return None
+
+ if _tmp_root is None:
+ config = spack.config.get_config('config')
+ candidates = config['build_stage']
+ if isinstance(candidates, basestring):
+ candidates = [candidates]
+
+ path = _first_accessible_path(candidates)
+ if not path:
+            raise StageError("No accessible stage paths in %s" % candidates)
+
+ # Return None to indicate we're using a local staging area.
+ if path == canonicalize_path(spack.stage_path):
+ _use_tmp_stage = False
+ return None
+
+ # ensure that any temp path is unique per user, so users don't
+ # fight over shared temporary space.
+ user = getpass.getuser()
+ if user not in path:
+ path = os.path.join(path, user, 'spack-stage')
+ else:
+ path = os.path.join(path, 'spack-stage')
+
+ mkdirp(path)
+ _tmp_root = path
+
+ return _tmp_root
class Stage(object):
@@ -48,14 +117,14 @@ class Stage(object):
some source code is downloaded and built before being installed.
It handles fetching the source code, either as an archive to be
expanded or by checking it out of a repository. A stage's
- lifecycle looks like this:
+ lifecycle looks like this::
- ```
- with Stage() as stage: # Context manager creates and destroys the stage directory
- stage.fetch() # Fetch a source archive into the stage.
- stage.expand_archive() # Expand the source archive.
- <install> # Build and install the archive. (handled by user of Stage)
- ```
+ with Stage() as stage: # Context manager creates and destroys the
+ # stage directory
+ stage.fetch() # Fetch a source archive into the stage.
+ stage.expand_archive() # Expand the source archive.
+ <install> # Build and install the archive.
+ # (handled by user of Stage)
When used as a context manager, the stage is automatically
destroyed if no exception is raised by the context. If an
@@ -63,18 +132,17 @@ class Stage(object):
destroyed, for potential reuse later.
You can also use the stage's create/destroy functions manually,
- like this:
-
- ```
- stage = Stage()
- try:
- stage.create() # Explicitly create the stage directory.
- stage.fetch() # Fetch a source archive into the stage.
- stage.expand_archive() # Expand the source archive.
- <install> # Build and install the archive. (handled by user of Stage)
- finally:
- stage.destroy() # Explicitly destroy the stage directory.
- ```
+ like this::
+
+ stage = Stage()
+ try:
+ stage.create() # Explicitly create the stage directory.
+ stage.fetch() # Fetch a source archive into the stage.
+ stage.expand_archive() # Expand the source archive.
+ <install> # Build and install the archive.
+ # (handled by user of Stage)
+ finally:
+ stage.destroy() # Explicitly destroy the stage directory.
If spack.use_tmp_stage is True, spack will attempt to create
stages in a tmp directory. Otherwise, stages are created directly
@@ -88,8 +156,13 @@ class Stage(object):
similar, and are intended to persist for only one run of spack.
"""
- def __init__(self, url_or_fetch_strategy,
- name=None, mirror_path=None, keep=False, path=None):
+ """Shared dict of all stage locks."""
+ stage_locks = {}
+
+ def __init__(
+ self, url_or_fetch_strategy,
+ name=None, mirror_path=None, keep=False, path=None, lock=True,
+ search_fn=None):
"""Create a stage object.
Parameters:
url_or_fetch_strategy
@@ -120,18 +193,22 @@ class Stage(object):
elif isinstance(url_or_fetch_strategy, fs.FetchStrategy):
self.fetcher = url_or_fetch_strategy
else:
- raise ValueError("Can't construct Stage without url or fetch strategy")
+ raise ValueError(
+ "Can't construct Stage without url or fetch strategy")
self.fetcher.set_stage(self)
- self.default_fetcher = self.fetcher # self.fetcher can change with mirrors.
- self.skip_checksum_for_mirror = True # used for mirrored archives of repositories.
+ # self.fetcher can change with mirrors.
+ self.default_fetcher = self.fetcher
+ self.search_fn = search_fn
+ # used for mirrored archives of repositories.
+ self.skip_checksum_for_mirror = True
- # TODO : this uses a protected member of tempfile, but seemed the only way to get a temporary name
- # TODO : besides, the temporary link name won't be the same as the temporary stage area in tmp_root
+        # TODO : this uses a protected member of tempfile, but seemed the only
+        # TODO : way to get a temporary name. Besides, the temporary link name
+        # TODO : won't be the same as the temporary stage area in tmp_root.
self.name = name
if name is None:
- self.name = STAGE_PREFIX + next(tempfile._get_candidate_names())
+ self.name = _stage_prefix + next(tempfile._get_candidate_names())
self.mirror_path = mirror_path
- self.tmp_root = find_tmp_root()
# Try to construct here a temporary name for the stage directory
# If this is a named stage, then construct a named path.
@@ -143,6 +220,19 @@ class Stage(object):
# Flag to decide whether to delete the stage folder on exit or not
self.keep = keep
+ # File lock for the stage directory. We use one file for all
+ # stage locks. See Spec.prefix_lock for details on this approach.
+ self._lock = None
+ if lock:
+ if self.name not in Stage.stage_locks:
+ sha1 = hashlib.sha1(self.name).digest()
+ lock_id = prefix_bits(sha1, bit_length(sys.maxsize))
+ stage_lock_path = join_path(spack.stage_path, '.lock')
+
+ Stage.stage_locks[self.name] = llnl.util.lock.Lock(
+ stage_lock_path, lock_id, 1)
+
+ self._lock = Stage.stage_locks[self.name]
def __enter__(self):
"""
@@ -151,10 +241,11 @@ class Stage(object):
Returns:
self
"""
+ if self._lock is not None:
+ self._lock.acquire_write(timeout=60)
self.create()
return self
-
def __exit__(self, exc_type, exc_val, exc_tb):
"""
Exiting from a stage context will delete the stage directory unless:
@@ -173,12 +264,13 @@ class Stage(object):
if exc_type is None and not self.keep:
self.destroy()
+ if self._lock is not None:
+ self._lock.release_write()
def _need_to_create_path(self):
"""Makes sure nothing weird has happened since the last time we
looked at path. Returns True if path already exists and is ok.
- Returns False if path needs to be created.
- """
+ Returns False if path needs to be created."""
# Path doesn't exist yet. Will need to create it.
if not os.path.exists(self.path):
return True
@@ -190,13 +282,15 @@ class Stage(object):
# Path looks ok, but need to check the target of the link.
if os.path.islink(self.path):
- real_path = os.path.realpath(self.path)
- real_tmp = os.path.realpath(self.tmp_root)
+ tmp_root = get_tmp_root()
+ if tmp_root is not None:
+ real_path = os.path.realpath(self.path)
+ real_tmp = os.path.realpath(tmp_root)
- if spack.use_tmp_stage:
# If we're using a tmp dir, it's a link, and it points at the
# right spot, then keep it.
- if (real_path.startswith(real_tmp) and os.path.exists(real_path)):
+ if (real_path.startswith(real_tmp) and
+ os.path.exists(real_path)):
return False
else:
# otherwise, just unlink it and start over.
@@ -204,7 +298,8 @@ class Stage(object):
return True
else:
- # If we're not tmp mode, then it's a link and we want a directory.
+ # If we're not tmp mode, then it's a link and we want a
+ # directory.
os.unlink(self.path)
return True
@@ -214,25 +309,29 @@ class Stage(object):
def expected_archive_files(self):
"""Possible archive file paths."""
paths = []
- if isinstance(self.fetcher, fs.URLFetchStrategy):
- paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
+ if isinstance(self.default_fetcher, fs.URLFetchStrategy):
+ paths.append(os.path.join(
+ self.path, os.path.basename(self.default_fetcher.url)))
if self.mirror_path:
- paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
+ paths.append(os.path.join(
+ self.path, os.path.basename(self.mirror_path)))
return paths
@property
+ def save_filename(self):
+ possible_filenames = self.expected_archive_files
+ if possible_filenames:
+ # This prefers using the URL associated with the default fetcher if
+ # available, so that the fetched resource name matches the remote
+ # name
+ return possible_filenames[0]
+
+ @property
def archive_file(self):
"""Path to the source archive within this stage directory."""
- paths = []
- if isinstance(self.fetcher, fs.URLFetchStrategy):
- paths.append(os.path.join(self.path, os.path.basename(self.fetcher.url)))
-
- if self.mirror_path:
- paths.append(os.path.join(self.path, os.path.basename(self.mirror_path)))
-
- for path in paths:
+ for path in self.expected_archive_files:
if os.path.exists(path):
return path
else:
@@ -262,7 +361,8 @@ class Stage(object):
return None
def chdir(self):
- """Changes directory to the stage path. Or dies if it is not set up."""
+ """Changes directory to the stage path. Or dies if it is not set
+ up."""
if os.path.isdir(self.path):
os.chdir(self.path)
else:
@@ -294,8 +394,12 @@ class Stage(object):
# then use the same digest. `spack mirror` ensures that
# the checksum will be the same.
digest = None
+ expand = True
+ extension = None
if isinstance(self.default_fetcher, fs.URLFetchStrategy):
digest = self.default_fetcher.digest
+ expand = self.default_fetcher.expand_archive
+ extension = self.default_fetcher.extension
# Have to skip the checksum for things archived from
# repositories. How can this be made safer?
@@ -303,14 +407,34 @@ class Stage(object):
# Add URL strategies for all the mirrors with the digest
for url in urls:
- fetchers.insert(0, fs.URLFetchStrategy(url, digest))
-
- for fetcher in fetchers:
+ fetchers.insert(
+ 0, fs.URLFetchStrategy(
+ url, digest, expand=expand, extension=extension))
+ if self.default_fetcher.cachable:
+ fetchers.insert(
+ 0, spack.fetch_cache.fetcher(
+ self.mirror_path, digest, expand=expand,
+ extension=extension))
+
+ def generate_fetchers():
+ for fetcher in fetchers:
+ yield fetcher
+ # The search function may be expensive, so wait until now to
+ # call it so the user can stop if a prior fetcher succeeded
+ if self.search_fn and not mirror_only:
+ dynamic_fetchers = self.search_fn()
+ for fetcher in dynamic_fetchers:
+ yield fetcher
+
+ for fetcher in generate_fetchers():
try:
fetcher.set_stage(self)
self.fetcher = fetcher
self.fetcher.fetch()
break
+ except spack.fetch_strategy.NoCacheError as e:
+ # Don't bother reporting when something is not cached.
+ continue
except spack.error.SpackError as e:
tty.msg("Fetching from %s failed." % fetcher)
tty.debug(e)
@@ -323,7 +447,8 @@ class Stage(object):
def check(self):
"""Check the downloaded archive against a checksum digest.
No-op if this stage checks code out of a repository."""
- if self.fetcher is not self.default_fetcher and self.skip_checksum_for_mirror:
+ if self.fetcher is not self.default_fetcher and \
+ self.skip_checksum_for_mirror:
tty.warn("Fetching from mirror without a checksum!",
"This package is normally checked out from a version "
"control system, but it has been archived on a spack "
@@ -333,11 +458,13 @@ class Stage(object):
else:
self.fetcher.check()
+ def cache_local(self):
+ spack.fetch_cache.store(self.fetcher, self.mirror_path)
+
def expand_archive(self):
"""Changes to the stage directory and attempt to expand the downloaded
- archive. Fail if the stage is not set up or if the archive is not yet
- downloaded.
- """
+ archive. Fail if the stage is not set up or if the archive is not yet
+ downloaded."""
archive_dir = self.source_path
if not archive_dir:
self.fetcher.expand()
@@ -351,11 +478,11 @@ class Stage(object):
"""
path = self.source_path
if not path:
- tty.die("Attempt to chdir before expanding archive.")
+ raise StageError("Attempt to chdir before expanding archive.")
else:
os.chdir(path)
if not os.listdir(path):
- tty.die("Archive was empty for %s" % self.name)
+ raise StageError("Archive was empty for %s" % self.name)
def restage(self):
"""Removes the expanded archive path if it exists, then re-expands
@@ -364,26 +491,29 @@ class Stage(object):
self.fetcher.reset()
def create(self):
- """
- Creates the stage directory
+ """Creates the stage directory.
- If self.tmp_root evaluates to False, the stage directory is
- created directly under spack.stage_path, otherwise this will
- attempt to create a stage in a temporary directory and link it
- into spack.stage_path.
+ If get_tmp_root() is None, the stage directory is created
+ directly under spack.stage_path, otherwise this will attempt to
+ create a stage in a temporary directory and link it into
+ spack.stage_path.
Spack will use the first writable location in spack.tmp_dirs
to create a stage. If there is no valid location in tmp_dirs,
fall back to making the stage inside spack.stage_path.
+
"""
# Create the top-level stage directory
mkdirp(spack.stage_path)
- remove_dead_links(spack.stage_path)
- # If a tmp_root exists then create a directory there and then link it in the stage area,
- # otherwise create the stage directory in self.path
+ remove_if_dead_link(self.path)
+
+ # If a tmp_root exists then create a directory there and then link it
+ # in the stage area, otherwise create the stage directory in self.path
if self._need_to_create_path():
- if self.tmp_root:
- tmp_dir = tempfile.mkdtemp('', STAGE_PREFIX, self.tmp_root)
+ tmp_root = get_tmp_root()
+ if tmp_root is not None:
+ tmp_dir = tempfile.mkdtemp('', _stage_prefix, tmp_root)
+ tty.debug('link %s -> %s' % (self.path, tmp_dir))
os.symlink(tmp_dir, self.path)
else:
mkdirp(self.path)
@@ -402,6 +532,7 @@ class Stage(object):
class ResourceStage(Stage):
+
def __init__(self, url_or_fetch_strategy, root, resource, **kwargs):
super(ResourceStage, self).__init__(url_or_fetch_strategy, **kwargs)
self.root_stage = root
@@ -411,12 +542,15 @@ class ResourceStage(Stage):
super(ResourceStage, self).expand_archive()
root_stage = self.root_stage
resource = self.resource
- placement = os.path.basename(self.source_path) if resource.placement is None else resource.placement
+ placement = os.path.basename(self.source_path) \
+ if resource.placement is None \
+ else resource.placement
if not isinstance(placement, dict):
placement = {'': placement}
# Make the paths in the dictionary absolute and link
for key, value in placement.iteritems():
- target_path = join_path(root_stage.source_path, resource.destination)
+ target_path = join_path(
+ root_stage.source_path, resource.destination)
destination_path = join_path(target_path, value)
source_path = join_path(self.source_path, key)
@@ -430,21 +564,23 @@ class ResourceStage(Stage):
if not os.path.exists(destination_path):
# Create a symlink
- tty.info('Moving resource stage\n\tsource : {stage}\n\tdestination : {destination}'.format(
- stage=source_path, destination=destination_path
- ))
+ tty.info('Moving resource stage\n\tsource : '
+ '{stage}\n\tdestination : {destination}'.format(
+ stage=source_path, destination=destination_path
+ ))
shutil.move(source_path, destination_path)
-@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive', 'restage', 'destroy'])
+@pattern.composite(method_list=['fetch', 'create', 'check', 'expand_archive',
+ 'restage', 'destroy', 'cache_local'])
class StageComposite:
- """
- Composite for Stage type objects. The first item in this composite is considered to be the root package, and
- operations that return a value are forwarded to it.
- """
+ """Composite for Stage type objects. The first item in this composite is
+ considered to be the root package, and operations that return a value are
+ forwarded to it."""
#
# __enter__ and __exit__ delegate to all stages in the composite.
#
+
def __enter__(self):
for item in self:
item.__enter__()
@@ -473,6 +609,10 @@ class StageComposite:
def archive_file(self):
return self[0].archive_file
+ @property
+ def mirror_path(self):
+ return self[0].mirror_path
+
class DIYStage(object):
"""Simple class that allows any directory to be a spack stage."""
@@ -489,13 +629,16 @@ class DIYStage(object):
raise ChdirError("Setup failed: no such directory: " + self.path)
# DIY stages do nothing as context managers.
- def __enter__(self): pass
- def __exit__(self, exc_type, exc_val, exc_tb): pass
+ def __enter__(self):
+ pass
+
+ def __exit__(self, exc_type, exc_val, exc_tb):
+ pass
def chdir_to_source(self):
self.chdir()
- def fetch(self, mirror_only):
+ def fetch(self, *args, **kwargs):
tty.msg("No need to fetch for DIY.")
def check(self):
@@ -511,6 +654,9 @@ class DIYStage(object):
# No need to destroy DIY stage.
pass
+ def cache_local(self):
+ tty.msg("Sources for DIY stages are not cached")
+
def _get_mirrors():
"""Get mirrors from spack configuration."""
@@ -532,25 +678,6 @@ def purge():
remove_linked_tree(stage_path)
-def find_tmp_root():
- if spack.use_tmp_stage:
- for tmp in spack.tmp_dirs:
- try:
- # Replace %u with username
- expanded = expand_user(tmp)
-
- # try to create a directory for spack stuff
- mkdirp(expanded)
-
- # return it if successful.
- return expanded
-
- except OSError:
- continue
-
- return None
-
-
class StageError(spack.error.SpackError):
""""Superclass for all errors encountered during staging."""
diff --git a/lib/spack/spack/store.py b/lib/spack/spack/store.py
new file mode 100644
index 0000000000..3f559315d2
--- /dev/null
+++ b/lib/spack/spack/store.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Components that manage Spack's installation tree.
+
+An install tree, or "build store" consists of two parts:
+
+ 1. A package database that tracks what is installed.
+ 2. A directory layout that determines how the installations
+ are laid out.
+
+The store contains all the install prefixes for packages installed by
+Spack. The simplest store could just contain prefixes named by DAG hash,
+but we use a fancier directory layout to make browsing the store and
+debugging easier.
+
+The directory layout is currently hard-coded to be a YAMLDirectoryLayout,
+so called because it stores build metadata within each prefix, in
+`spec.yaml` files. In future versions of Spack we may consider allowing
+install trees to define their own layouts with some per-tree
+configuration.
+
+"""
+import os
+import spack
+import spack.config
+from spack.util.path import canonicalize_path
+from spack.database import Database
+from spack.directory_layout import YamlDirectoryLayout
+
+__author__ = "Benedikt Hegner (CERN)"
+__all__ = ['db', 'layout', 'root']
+
+#
+# Read in the config
+#
+config = spack.config.get_config("config")
+
+#
+# Set up the install path
+#
+root = canonicalize_path(
+ config.get('install_tree', os.path.join(spack.opt_path, 'spack')))
+
+#
+# Set up the installed packages database
+#
+db = Database(root)
+
+#
+# This controls how spack lays out install prefixes and
+# stage directories.
+#
+layout = YamlDirectoryLayout(root)
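
A short, hedged sketch of how other parts of Spack are expected to use this module; the package name is illustrative:

    import spack.store

    installed = spack.store.db.query('libelf')   # installed specs, if any
    if installed:
        prefix = spack.store.layout.path_for_spec(installed[0])
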
diff --git a/lib/spack/spack/test/__init__.py b/lib/spack/spack/test/__init__.py
index 1668e271fa..ed1ec23bca 100644
--- a/lib/spack/spack/test/__init__.py
+++ b/lib/spack/spack/test/__init__.py
@@ -22,67 +22,3 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import sys
-
-import llnl.util.tty as tty
-import nose
-import spack
-from llnl.util.filesystem import join_path
-from llnl.util.tty.colify import colify
-from spack.test.tally_plugin import Tally
-"""Names of tests to be included in Spack's test suite"""
-test_names = ['versions', 'url_parse', 'url_substitution', 'packages', 'stage',
- 'spec_syntax', 'spec_semantics', 'spec_dag', 'concretize',
- 'multimethod', 'install', 'package_sanity', 'config',
- 'directory_layout', 'pattern', 'python_version', 'git_fetch',
- 'svn_fetch', 'hg_fetch', 'mirror', 'modules', 'url_extrapolate',
- 'cc', 'link_tree', 'spec_yaml', 'optional_deps',
- 'make_executable', 'configure_guess', 'lock', 'database',
- 'namespace_trie', 'yaml', 'sbang', 'environment',
- 'cmd.uninstall', 'cmd.test_install']
-
-
-def list_tests():
- """Return names of all tests that can be run for Spack."""
- return test_names
-
-
-def run(names, outputDir, verbose=False):
- """Run tests with the supplied names. Names should be a list. If
- it's empty, run ALL of Spack's tests."""
- if not names:
- names = test_names
- else:
- for test in names:
- if test not in test_names:
- tty.error("%s is not a valid spack test name." % test,
- "Valid names are:")
- colify(sorted(test_names), indent=4)
- sys.exit(1)
-
- tally = Tally()
- for test in names:
- module = 'spack.test.' + test
- print(module)
-
- tty.msg("Running test: %s" % test)
-
- runOpts = ["--with-%s" % spack.test.tally_plugin.Tally.name]
-
- if outputDir:
- xmlOutputFname = "unittests-{0}.xml".format(test)
- xmlOutputPath = join_path(outputDir, xmlOutputFname)
- runOpts += ["--with-xunit",
- "--xunit-file={0}".format(xmlOutputPath)]
- argv = [""] + runOpts + [module]
- nose.run(argv=argv, addplugins=[tally])
-
- succeeded = not tally.failCount and not tally.errorCount
- tty.msg("Tests Complete.", "%5d tests run" % tally.numberOfTestsRun,
- "%5d failures" % tally.failCount, "%5d errors" % tally.errorCount)
-
- if succeeded:
- tty.info("OK", format='g')
- else:
- tty.info("FAIL", format='r')
- sys.exit(1)
diff --git a/lib/spack/spack/test/architecture.py b/lib/spack/spack/test/architecture.py
new file mode 100644
index 0000000000..fb4113361c
--- /dev/null
+++ b/lib/spack/spack/test/architecture.py
@@ -0,0 +1,161 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+""" Test checks if the architecture class is created correctly and also that
+ the functions are looking for the correct architecture name
+"""
+import itertools
+import os
+import platform as py_platform
+import spack
+import spack.architecture
+from spack.spec import Spec
+from spack.platforms.cray import Cray
+from spack.platforms.linux import Linux
+from spack.platforms.bgq import Bgq
+from spack.platforms.darwin import Darwin
+
+
+def test_dict_functions_for_architecture():
+ arch = spack.architecture.Arch()
+ arch.platform = spack.architecture.platform()
+ arch.platform_os = arch.platform.operating_system('default_os')
+ arch.target = arch.platform.target('default_target')
+
+ new_arch = spack.architecture.Arch.from_dict(arch.to_dict())
+
+ assert arch == new_arch
+ assert isinstance(arch, spack.architecture.Arch)
+ assert isinstance(arch.platform, spack.architecture.Platform)
+ assert isinstance(arch.platform_os, spack.architecture.OperatingSystem)
+ assert isinstance(arch.target, spack.architecture.Target)
+ assert isinstance(new_arch, spack.architecture.Arch)
+ assert isinstance(new_arch.platform, spack.architecture.Platform)
+ assert isinstance(new_arch.platform_os, spack.architecture.OperatingSystem)
+ assert isinstance(new_arch.target, spack.architecture.Target)
+
+
+def test_platform():
+ output_platform_class = spack.architecture.real_platform()
+ if os.path.exists('/opt/cray/craype'):
+ my_platform_class = Cray()
+ elif os.path.exists('/bgsys'):
+ my_platform_class = Bgq()
+ elif 'Linux' in py_platform.system():
+ my_platform_class = Linux()
+ elif 'Darwin' in py_platform.system():
+ my_platform_class = Darwin()
+
+ assert str(output_platform_class) == str(my_platform_class)
+
+
+def test_boolness():
+ # Make sure architecture reports that it's False when nothing's set.
+ arch = spack.architecture.Arch()
+ assert not arch
+
+ # Dummy architecture parts
+ plat = spack.architecture.platform()
+ plat_os = plat.operating_system('default_os')
+ plat_target = plat.target('default_target')
+
+ # Make sure architecture reports that it's True when anything is set.
+ arch = spack.architecture.Arch()
+ arch.platform = plat
+ assert arch
+
+ arch = spack.architecture.Arch()
+ arch.platform_os = plat_os
+ assert arch
+
+ arch = spack.architecture.Arch()
+ arch.target = plat_target
+ assert arch
+
+
+def test_user_front_end_input(config):
+    """Test that when the user specifies only 'frontend', both the frontend
+    target and the frontend operating system match.
+    """
+ platform = spack.architecture.platform()
+ frontend_os = str(platform.operating_system('frontend'))
+ frontend_target = str(platform.target('frontend'))
+
+ frontend_spec = Spec('libelf os=frontend target=frontend')
+ frontend_spec.concretize()
+
+ assert frontend_os == frontend_spec.architecture.platform_os
+ assert frontend_target == frontend_spec.architecture.target
+
+
+def test_user_back_end_input(config):
+    """Test that when the user specifies only 'backend', both the backend
+    target and the backend operating system match.
+    """
+ platform = spack.architecture.platform()
+ backend_os = str(platform.operating_system("backend"))
+ backend_target = str(platform.target("backend"))
+
+ backend_spec = Spec("libelf os=backend target=backend")
+ backend_spec.concretize()
+
+ assert backend_os == backend_spec.architecture.platform_os
+ assert backend_target == backend_spec.architecture.target
+
+
+def test_user_defaults(config):
+ platform = spack.architecture.platform()
+ default_os = str(platform.operating_system("default_os"))
+ default_target = str(platform.target("default_target"))
+
+ default_spec = Spec("libelf") # default is no args
+ default_spec.concretize()
+
+ assert default_os == default_spec.architecture.platform_os
+ assert default_target == default_spec.architecture.target
+
+
+def test_user_input_combination(config):
+ platform = spack.architecture.platform()
+ os_list = platform.operating_sys.keys()
+ target_list = platform.targets.keys()
+ additional = ["fe", "be", "frontend", "backend"]
+
+ os_list.extend(additional)
+ target_list.extend(additional)
+
+ combinations = itertools.product(os_list, target_list)
+ results = []
+ for arch in combinations:
+ o, t = arch
+ spec = Spec("libelf os=%s target=%s" % (o, t))
+ spec.concretize()
+ results.append(
+ spec.architecture.platform_os == str(platform.operating_system(o))
+ )
+ results.append(
+ spec.architecture.target == str(platform.target(t))
+ )
+ res = all(results)
+ assert res
diff --git a/lib/spack/spack/test/build_system_guess.py b/lib/spack/spack/test/build_system_guess.py
new file mode 100644
index 0000000000..82bf1964b2
--- /dev/null
+++ b/lib/spack/spack/test/build_system_guess.py
@@ -0,0 +1,65 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import pytest
+import spack.cmd.create
+import spack.util.executable
+import spack.stage
+
+
+@pytest.fixture(
+ scope='function',
+ params=[
+ ('configure', 'autotools'),
+ ('CMakeLists.txt', 'cmake'),
+ ('SConstruct', 'scons'),
+ ('setup.py', 'python'),
+ ('NAMESPACE', 'r'),
+ ('WORKSPACE', 'bazel'),
+ ('foobar', 'generic')
+ ]
+)
+def url_and_build_system(request, tmpdir):
+    """Set up an archive containing a file whose name triggers a particular
+    build-system guess, and yield the archive's URL along with the
+    expected guess.
+    """
+ tar = spack.util.executable.which('tar')
+ orig_dir = tmpdir.chdir()
+ filename, system = request.param
+ tmpdir.ensure('archive', filename)
+ tar('czf', 'archive.tar.gz', 'archive')
+ url = 'file://' + str(tmpdir.join('archive.tar.gz'))
+ yield url, system
+ orig_dir.chdir()
+
+
+def test_build_systems(url_and_build_system):
+ url, build_system = url_and_build_system
+ with spack.stage.Stage(url) as stage:
+ stage.fetch()
+ guesser = spack.cmd.create.BuildSystemGuesser()
+ guesser(stage, url)
+ assert build_system == guesser.build_system
diff --git a/lib/spack/spack/test/build_systems.py b/lib/spack/spack/test/build_systems.py
new file mode 100644
index 0000000000..2cafba0333
--- /dev/null
+++ b/lib/spack/spack/test/build_systems.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import spack
+from spack.build_environment import get_std_cmake_args
+from spack.spec import Spec
+
+
+def test_cmake_std_args(config, builtin_mock):
+ # Call the function on a CMakePackage instance
+ s = Spec('cmake-client')
+ s.concretize()
+ pkg = spack.repo.get(s)
+ assert pkg.std_cmake_args == get_std_cmake_args(pkg)
+
+ # Call it on another kind of package
+ s = Spec('mpich')
+ s.concretize()
+ pkg = spack.repo.get(s)
+ assert get_std_cmake_args(pkg)
diff --git a/lib/spack/spack/test/cc.py b/lib/spack/spack/test/cc.py
index a630866143..74b6b31654 100644
--- a/lib/spack/spack/test/cc.py
+++ b/lib/spack/spack/test/cc.py
@@ -45,24 +45,30 @@ test_command = [
'-llib1', '-llib2',
'arg4',
'-Wl,--end-group',
- '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker', '-rpath', '-Xlinker', '/fourth/rpath',
+ '-Xlinker', '-rpath', '-Xlinker', '/third/rpath', '-Xlinker',
+ '-rpath', '-Xlinker', '/fourth/rpath',
'-llib3', '-llib4',
'arg5', 'arg6']
-class CompilerTest(unittest.TestCase):
+class CompilerWrapperTest(unittest.TestCase):
def setUp(self):
self.cc = Executable(join_path(spack.build_env_path, "cc"))
self.ld = Executable(join_path(spack.build_env_path, "ld"))
self.cpp = Executable(join_path(spack.build_env_path, "cpp"))
+ self.cxx = Executable(join_path(spack.build_env_path, "c++"))
+ self.fc = Executable(join_path(spack.build_env_path, "fc"))
self.realcc = "/bin/mycc"
self.prefix = "/spack-test-prefix"
os.environ['SPACK_CC'] = self.realcc
+ os.environ['SPACK_CXX'] = self.realcc
+ os.environ['SPACK_FC'] = self.realcc
+
os.environ['SPACK_PREFIX'] = self.prefix
- os.environ['SPACK_ENV_PATH']="test"
+ os.environ['SPACK_ENV_PATH'] = "test"
os.environ['SPACK_DEBUG_LOG_DIR'] = "."
os.environ['SPACK_COMPILER_SPEC'] = "gcc@4.4.7"
os.environ['SPACK_SHORT_SPEC'] = "foo@1.2"
@@ -92,55 +98,119 @@ class CompilerTest(unittest.TestCase):
if 'SPACK_DEPENDENCIES' in os.environ:
del os.environ['SPACK_DEPENDENCIES']
-
def tearDown(self):
shutil.rmtree(self.tmp_deps, True)
-
def check_cc(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.cc(*args, output=str).strip(), expected)
+ def check_cxx(self, command, args, expected):
+ os.environ['SPACK_TEST_COMMAND'] = command
+ self.assertEqual(self.cxx(*args, output=str).strip(), expected)
+
+ def check_fc(self, command, args, expected):
+ os.environ['SPACK_TEST_COMMAND'] = command
+ self.assertEqual(self.fc(*args, output=str).strip(), expected)
def check_ld(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.ld(*args, output=str).strip(), expected)
-
def check_cpp(self, command, args, expected):
os.environ['SPACK_TEST_COMMAND'] = command
self.assertEqual(self.cpp(*args, output=str).strip(), expected)
-
def test_vcheck_mode(self):
self.check_cc('dump-mode', ['-I/include', '--version'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '-V'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '-v'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '-dumpversion'], "vcheck")
self.check_cc('dump-mode', ['-I/include', '--version', '-c'], "vcheck")
- self.check_cc('dump-mode', ['-I/include', '-V', '-o', 'output'], "vcheck")
-
+ self.check_cc('dump-mode', ['-I/include',
+ '-V', '-o', 'output'], "vcheck")
def test_cpp_mode(self):
self.check_cc('dump-mode', ['-E'], "cpp")
self.check_cpp('dump-mode', [], "cpp")
-
def test_as_mode(self):
self.check_cc('dump-mode', ['-S'], "as")
-
def test_ccld_mode(self):
self.check_cc('dump-mode', [], "ccld")
self.check_cc('dump-mode', ['foo.c', '-o', 'foo'], "ccld")
- self.check_cc('dump-mode', ['foo.c', '-o', 'foo', '-Wl,-rpath,foo'], "ccld")
- self.check_cc('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ccld")
-
+ self.check_cc('dump-mode', ['foo.c', '-o',
+ 'foo', '-Wl,-rpath,foo'], "ccld")
+ self.check_cc(
+ 'dump-mode',
+ ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'],
+ "ccld")
def test_ld_mode(self):
self.check_ld('dump-mode', [], "ld")
- self.check_ld('dump-mode', ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'], "ld")
+ self.check_ld(
+ 'dump-mode',
+ ['foo.o', 'bar.o', 'baz.o', '-o', 'foo', '-Wl,-rpath,foo'],
+ "ld")
+
+ def test_flags(self):
+ os.environ['SPACK_LDFLAGS'] = '-L foo'
+ os.environ['SPACK_LDLIBS'] = '-lfoo'
+ os.environ['SPACK_CPPFLAGS'] = '-g -O1'
+ os.environ['SPACK_CFLAGS'] = '-Wall'
+ os.environ['SPACK_CXXFLAGS'] = '-Werror'
+ os.environ['SPACK_FFLAGS'] = '-w'
+
+ # Test ldflags added properly in ld mode
+ self.check_ld('dump-args', test_command,
+ "ld " +
+ '-rpath ' + self.prefix + '/lib ' +
+ '-rpath ' + self.prefix + '/lib64 ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+
+ # Test cppflags added properly in cpp mode
+ self.check_cpp('dump-args', test_command,
+ "cpp " +
+ '-g -O1 ' +
+ ' '.join(test_command))
+
+ # Test ldflags, cppflags, and language specific flags are added in
+ # proper order
+ self.check_cc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-g -O1 ' +
+ '-Wall ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+
+ self.check_cxx('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-g -O1 ' +
+ '-Werror ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+
+ self.check_fc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-w ' +
+ '-g -O1 ' +
+ '-L foo ' +
+ ' '.join(test_command) + ' ' +
+ '-lfoo')
+ os.environ['SPACK_LDFLAGS'] = ''
+ os.environ['SPACK_LDLIBS'] = ''
def test_dep_rpath(self):
"""Ensure RPATHs for root package are added."""
@@ -150,10 +220,11 @@ class CompilerTest(unittest.TestCase):
'-Wl,-rpath,' + self.prefix + '/lib64 ' +
' '.join(test_command))
-
def test_dep_include(self):
"""Ensure a single dependency include directory is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep4
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_cc('dump-args', test_command,
self.realcc + ' ' +
'-Wl,-rpath,' + self.prefix + '/lib ' +
@@ -161,10 +232,11 @@ class CompilerTest(unittest.TestCase):
'-I' + self.dep4 + '/include ' +
' '.join(test_command))
-
def test_dep_lib(self):
"""Ensure a single dependency RPATH is added."""
os.environ['SPACK_DEPENDENCIES'] = self.dep2
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_cc('dump-args', test_command,
self.realcc + ' ' +
'-Wl,-rpath,' + self.prefix + '/lib ' +
@@ -173,11 +245,34 @@ class CompilerTest(unittest.TestCase):
'-Wl,-rpath,' + self.dep2 + '/lib64 ' +
' '.join(test_command))
+ def test_dep_lib_no_rpath(self):
+ """Ensure a single dependency link flag is added with no dep RPATH."""
+ os.environ['SPACK_DEPENDENCIES'] = self.dep2
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ self.check_cc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-L' + self.dep2 + '/lib64 ' +
+ ' '.join(test_command))
+
+ def test_dep_lib_no_lib(self):
+ """Ensure a single dependency RPATH is added with no -L."""
+ os.environ['SPACK_DEPENDENCIES'] = self.dep2
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ self.check_cc('dump-args', test_command,
+ self.realcc + ' ' +
+ '-Wl,-rpath,' + self.prefix + '/lib ' +
+ '-Wl,-rpath,' + self.prefix + '/lib64 ' +
+ '-Wl,-rpath,' + self.dep2 + '/lib64 ' +
+ ' '.join(test_command))
def test_all_deps(self):
"""Ensure includes and RPATHs for all deps are added. """
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
# This is probably more constrained than it needs to be; it
# checks order within prepended args and doesn't strictly have
@@ -202,11 +297,12 @@ class CompilerTest(unittest.TestCase):
' '.join(test_command))
-
def test_ld_deps(self):
"""Ensure no (extra) -I args or -Wl, are passed in ld mode."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([
self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
self.check_ld('dump-args', test_command,
'ld ' +
@@ -224,10 +320,46 @@ class CompilerTest(unittest.TestCase):
' '.join(test_command))
+ def test_ld_deps_no_rpath(self):
+ """Ensure SPACK_RPATH_DEPS controls RPATHs for ld."""
+ os.environ['SPACK_DEPENDENCIES'] = ':'.join([
+ self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+
+ self.check_ld('dump-args', test_command,
+ 'ld ' +
+ '-rpath ' + self.prefix + '/lib ' +
+ '-rpath ' + self.prefix + '/lib64 ' +
+
+ '-L' + self.dep3 + '/lib64 ' +
+ '-L' + self.dep2 + '/lib64 ' +
+ '-L' + self.dep1 + '/lib ' +
+
+ ' '.join(test_command))
+
+ def test_ld_deps_no_link(self):
+ """Ensure SPACK_LINK_DEPS controls -L for ld."""
+ os.environ['SPACK_DEPENDENCIES'] = ':'.join([
+ self.dep1, self.dep2, self.dep3, self.dep4])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+
+ self.check_ld('dump-args', test_command,
+ 'ld ' +
+ '-rpath ' + self.prefix + '/lib ' +
+ '-rpath ' + self.prefix + '/lib64 ' +
+
+ '-rpath ' + self.dep3 + '/lib64 ' +
+ '-rpath ' + self.dep2 + '/lib64 ' +
+ '-rpath ' + self.dep1 + '/lib ' +
+
+ ' '.join(test_command))
+
def test_ld_deps_reentrant(self):
"""Make sure ld -r is handled correctly on OS's where it doesn't
support rpaths."""
os.environ['SPACK_DEPENDENCIES'] = ':'.join([self.dep1])
+ os.environ['SPACK_RPATH_DEPS'] = os.environ['SPACK_DEPENDENCIES']
+ os.environ['SPACK_LINK_DEPS'] = os.environ['SPACK_DEPENDENCIES']
os.environ['SPACK_SHORT_SPEC'] = "foo@1.2=linux-x86_64"
reentrant_test_command = ['-r'] + test_command
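A note on the new test_flags expectations above: in ccld mode the wrapper is expected to inject its arguments in a fixed order around the original command line. The sketch below reconstructs that order for the C case only, mirroring the check_cc('dump-args', ...) expectation; the variable names are illustrative, and note that the Fortran expectation places SPACK_FFLAGS before the cppflags.

    realcc = '/bin/mycc'
    prefix = '/spack-test-prefix'
    command = ['foo.c', '-o', 'foo']            # stand-in for test_command

    expected = ([realcc,
                 '-Wl,-rpath,' + prefix + '/lib',       # package RPATHs
                 '-Wl,-rpath,' + prefix + '/lib64'] +
                ['-g', '-O1'] +                         # SPACK_CPPFLAGS
                ['-Wall'] +                             # SPACK_CFLAGS
                ['-L', 'foo'] +                         # SPACK_LDFLAGS
                command +
                ['-lfoo'])                              # SPACK_LDLIBS
    print(' '.join(expected))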
diff --git a/lib/spack/spack/test/cmd/find.py b/lib/spack/spack/test/cmd/find.py
new file mode 100644
index 0000000000..dcd123d46e
--- /dev/null
+++ b/lib/spack/spack/test/cmd/find.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import spack.cmd.find
+from spack.util.pattern import Bunch
+
+
+def test_query_arguments():
+ query_arguments = spack.cmd.find.query_arguments
+ # Default arguments
+ args = Bunch(
+ only_missing=False,
+ missing=False,
+ unknown=False,
+ explicit=False,
+ implicit=False
+ )
+ q_args = query_arguments(args)
+ assert 'installed' in q_args
+ assert 'known' in q_args
+ assert 'explicit' in q_args
+ assert q_args['installed'] is True
+ assert q_args['known'] is any
+ assert q_args['explicit'] is any
+ # Check that explicit works correctly
+ args.explicit = True
+ q_args = query_arguments(args)
+ assert q_args['explicit'] is True
+ args.explicit = False
+ args.implicit = True
+ q_args = query_arguments(args)
+ assert q_args['explicit'] is False
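For context on the asserts above: query_arguments uses the Python builtin any as a sentinel meaning "do not filter on this attribute", while True/False request explicit filtering. A minimal sketch of that convention follows; this is a reading of the test's expectations, not a restatement of the Database implementation.

    q_args = {'installed': True, 'known': any, 'explicit': any}

    for key, value in q_args.items():
        if value is any:
            print('%s: no filtering' % key)
        else:
            print('%s: filter where %s == %s' % (key, key, value))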
diff --git a/lib/spack/spack/test/cmd/test_install.py b/lib/spack/spack/test/cmd/install.py
index d17e013ed2..304eb04a55 100644
--- a/lib/spack/spack/test/cmd/test_install.py
+++ b/lib/spack/spack/test/cmd/install.py
@@ -22,18 +22,27 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import StringIO
+import argparse
+import codecs
import collections
-from contextlib import contextmanager
+import contextlib
+import unittest
-import StringIO
+import llnl.util.filesystem
+import spack
+import spack.cmd
+import spack.cmd.install as install
FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+
# Monkey-patch open to write module files to a StringIO instance
-@contextmanager
-def mock_open(filename, mode):
+@contextlib.contextmanager
+def mock_open(filename, mode, *args):
if not mode == 'wb':
- raise RuntimeError('test.test_install : unexpected opening mode for monkey-patched open')
+ message = 'test.test_install : unexpected opening mode for mock_open'
+ raise RuntimeError(message)
FILE_REGISTRY[filename] = StringIO.StringIO()
@@ -44,31 +53,40 @@ def mock_open(filename, mode):
FILE_REGISTRY[filename] = handle.getvalue()
handle.close()
-import os
-import itertools
-import unittest
-
-import spack
-import spack.cmd
-
-
-# The use of __import__ is necessary to maintain a name with hyphen (which cannot be an identifier in python)
-test_install = __import__("spack.cmd.test-install", fromlist=['test_install'])
-
class MockSpec(object):
+
def __init__(self, name, version, hashStr=None):
- self.dependencies = {}
+ self._dependencies = {}
self.name = name
self.version = version
self.hash = hashStr if hashStr else hash((name, version))
+ def _deptype_norm(self, deptype):
+ if deptype is None:
+ return spack.alldeps
+ # Force deptype to be a tuple so that we can do set intersections.
+ if isinstance(deptype, str):
+ return (deptype,)
+ return deptype
+
+ def _find_deps(self, where, deptype):
+ deptype = self._deptype_norm(deptype)
+
+ return [dep.spec
+ for dep in where.values()
+ if deptype and any(d in deptype for d in dep.deptypes)]
+
+ def dependencies(self, deptype=None):
+ return self._find_deps(self._dependencies, deptype)
+
+ def dependents(self, deptype=None):
+ return self._find_deps(self._dependents, deptype)
+
def traverse(self, order=None):
- for _, spec in self.dependencies.items():
- yield spec
+ for _, spec in self._dependencies.items():
+ yield spec.spec
yield self
- #allDeps = itertools.chain.from_iterable(i.traverse() for i in self.dependencies.itervalues())
- #return set(itertools.chain([self], allDeps))
def dag_hash(self):
return self.hash
@@ -79,6 +97,7 @@ class MockSpec(object):
class MockPackage(object):
+
def __init__(self, spec, buildLogPath):
self.name = spec.name
self.spec = spec
@@ -86,10 +105,13 @@ class MockPackage(object):
self.build_log_path = buildLogPath
def do_install(self, *args, **kwargs):
+ for x in self.spec.dependencies():
+ x.package.do_install(*args, **kwargs)
self.installed = True
class MockPackageDb(object):
+
def __init__(self, init=None):
self.specToPkg = {}
if init:
@@ -102,35 +124,30 @@ class MockPackageDb(object):
def mock_fetch_log(path):
return []
-specX = MockSpec('X', "1.2.0")
-specY = MockSpec('Y', "2.3.8")
-specX.dependencies['Y'] = specY
+
+specX = MockSpec('X', '1.2.0')
+specY = MockSpec('Y', '2.3.8')
+specX._dependencies['Y'] = spack.spec.DependencySpec(
+ specX, specY, spack.alldeps)
pkgX = MockPackage(specX, 'logX')
pkgY = MockPackage(specY, 'logY')
-
-
-class MockArgs(object):
- def __init__(self, package):
- self.package = package
- self.jobs = None
- self.no_checksum = False
- self.output = None
+specX.package = pkgX
+specY.package = pkgY
# TODO: add test(s) where Y fails to install
-class TestInstallTest(unittest.TestCase):
- """
- Tests test-install where X->Y
- """
+class InstallTestJunitLog(unittest.TestCase):
+ """Tests test-install where X->Y"""
def setUp(self):
- super(TestInstallTest, self).setUp()
-
+ super(InstallTestJunitLog, self).setUp()
+ install.PackageBase = MockPackage
# Monkey patch parse specs
+
def monkey_parse_specs(x, concretize):
- if x == 'X':
+ if x == ['X']:
return [specX]
- elif x == 'Y':
+ elif x == ['Y']:
return [specY]
return []
@@ -138,14 +155,15 @@ class TestInstallTest(unittest.TestCase):
spack.cmd.parse_specs = monkey_parse_specs
# Monkey patch os.mkdirp
- self.os_mkdir = os.mkdir
- os.mkdir = lambda x: True
+ self.mkdirp = llnl.util.filesystem.mkdirp
+ llnl.util.filesystem.mkdirp = lambda x: True
# Monkey patch open
- test_install.open = mock_open
+ self.codecs_open = codecs.open
+ codecs.open = mock_open
# Clean FILE_REGISTRY
- FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+ FILE_REGISTRY.clear()
pkgX.installed = False
pkgY.installed = False
@@ -157,21 +175,24 @@ class TestInstallTest(unittest.TestCase):
def tearDown(self):
# Remove the monkey patched test_install.open
- test_install.open = open
+ codecs.open = self.codecs_open
# Remove the monkey patched os.mkdir
- os.mkdir = self.os_mkdir
- del self.os_mkdir
+ llnl.util.filesystem.mkdirp = self.mkdirp
+ del self.mkdirp
# Remove the monkey patched parse_specs
spack.cmd.parse_specs = self.parse_specs
del self.parse_specs
- super(TestInstallTest, self).tearDown()
+ super(InstallTestJunitLog, self).tearDown()
spack.repo = self.saved_db
def test_installing_both(self):
- test_install.test_install(None, MockArgs('X') )
+ parser = argparse.ArgumentParser()
+ install.setup_parser(parser)
+ args = parser.parse_args(['--log-format=junit', 'X'])
+ install.install(parser, args)
self.assertEqual(len(FILE_REGISTRY), 1)
for _, content in FILE_REGISTRY.items():
self.assertTrue('tests="2"' in content)
@@ -181,10 +202,14 @@ class TestInstallTest(unittest.TestCase):
def test_dependency_already_installed(self):
pkgX.installed = True
pkgY.installed = True
- test_install.test_install(None, MockArgs('X'))
+ parser = argparse.ArgumentParser()
+ install.setup_parser(parser)
+ args = parser.parse_args(['--log-format=junit', 'X'])
+ install.install(parser, args)
self.assertEqual(len(FILE_REGISTRY), 1)
for _, content in FILE_REGISTRY.items():
self.assertTrue('tests="2"' in content)
self.assertTrue('failures="0"' in content)
self.assertTrue('errors="0"' in content)
- self.assertEqual(sum('skipped' in line for line in content.split('\n')), 2)
+ self.assertEqual(
+ sum('skipped' in line for line in content.split('\n')), 2)
diff --git a/lib/spack/spack/test/cmd/module.py b/lib/spack/spack/test/cmd/module.py
new file mode 100644
index 0000000000..03ce1ef206
--- /dev/null
+++ b/lib/spack/spack/test/cmd/module.py
@@ -0,0 +1,106 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import argparse
+import os.path
+
+import pytest
+import spack.cmd.module as module
+import spack.modules as modules
+
+
+def _get_module_files(args):
+ return [modules.module_types[args.module_type](spec).file_name
+ for spec in args.specs()]
+
+
+@pytest.fixture(scope='module')
+def parser():
+ """Returns the parser for the module command"""
+ parser = argparse.ArgumentParser()
+ module.setup_parser(parser)
+ return parser
+
+
+@pytest.fixture(
+ params=[
+ ['rm', 'doesnotexist'], # Try to remove a non existing module [tcl]
+ ['find', 'mpileaks'], # Try to find a module with multiple matches
+ ['find', 'doesnotexist'], # Try to find a module with no matches
+ ]
+)
+def failure_args(request):
+ """A list of arguments that will cause a failure"""
+ return request.param
+
+
+# TODO : test the --delete-tree option
+# TODO : this requires having a separate directory for test modules
+# TODO : add tests for loads and find to check the prompt format
+
+
+def test_exit_with_failure(database, parser, failure_args):
+ args = parser.parse_args(failure_args)
+ with pytest.raises(SystemExit):
+ module.module(parser, args)
+
+
+def test_remove_and_add_tcl(database, parser):
+ # Remove existing modules [tcl]
+ args = parser.parse_args(['rm', '-y', 'mpileaks'])
+ module_files = _get_module_files(args)
+ for item in module_files:
+ assert os.path.exists(item)
+ module.module(parser, args)
+ for item in module_files:
+ assert not os.path.exists(item)
+
+ # Add them back [tcl]
+ args = parser.parse_args(['refresh', '-y', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ assert os.path.exists(item)
+
+
+def test_find(database, parser):
+ # Try to find a module
+ args = parser.parse_args(['find', 'libelf'])
+ module.module(parser, args)
+
+
+def test_remove_and_add_dotkit(database, parser):
+ # Remove existing modules [dotkit]
+ args = parser.parse_args(['rm', '-y', '-m', 'dotkit', 'mpileaks'])
+ module_files = _get_module_files(args)
+ for item in module_files:
+ assert os.path.exists(item)
+ module.module(parser, args)
+ for item in module_files:
+ assert not os.path.exists(item)
+
+ # Add them back [dotkit]
+ args = parser.parse_args(['refresh', '-y', '-m', 'dotkit', 'mpileaks'])
+ module.module(parser, args)
+ for item in module_files:
+ assert os.path.exists(item)
diff --git a/lib/spack/spack/test/cmd/test_compiler_cmd.py b/lib/spack/spack/test/cmd/test_compiler_cmd.py
new file mode 100644
index 0000000000..647404e6da
--- /dev/null
+++ b/lib/spack/spack/test/cmd/test_compiler_cmd.py
@@ -0,0 +1,94 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import pytest
+import llnl.util.filesystem
+
+import spack.cmd.compiler
+import spack.compilers
+import spack.spec
+import spack.util.pattern
+from spack.version import Version
+
+test_version = '4.5-spacktest'
+
+
+@pytest.fixture()
+def mock_compiler_dir(tmpdir):
+ """Return a directory containing a fake, but detectable compiler."""
+
+ tmpdir.ensure('bin', dir=True)
+ bin_dir = tmpdir.join('bin')
+
+ gcc_path = bin_dir.join('gcc')
+ gxx_path = bin_dir.join('g++')
+ gfortran_path = bin_dir.join('gfortran')
+
+ gcc_path.write("""\
+#!/bin/sh
+
+for arg in "$@"; do
+ if [ "$arg" = -dumpversion ]; then
+ echo '%s'
+ fi
+done
+""" % test_version)
+
+ # Create some mock compilers in the temporary directory
+ llnl.util.filesystem.set_executable(str(gcc_path))
+ gcc_path.copy(gxx_path, mode=True)
+ gcc_path.copy(gfortran_path, mode=True)
+
+ return str(tmpdir)
+
+
+@pytest.mark.usefixtures('config', 'builtin_mock')
+class TestCompilerCommand(object):
+
+ def test_compiler_remove(self):
+ args = spack.util.pattern.Bunch(
+ all=True, compiler_spec='gcc@4.5.0', add_paths=[], scope=None
+ )
+ spack.cmd.compiler.compiler_remove(args)
+ compilers = spack.compilers.all_compilers()
+ assert spack.spec.CompilerSpec("gcc@4.5.0") not in compilers
+
+ def test_compiler_add(self, mock_compiler_dir):
+ # Compilers available by default.
+ old_compilers = set(spack.compilers.all_compilers())
+
+ args = spack.util.pattern.Bunch(
+ all=None,
+ compiler_spec=None,
+ add_paths=[mock_compiler_dir],
+ scope=None
+ )
+ spack.cmd.compiler.compiler_find(args)
+
+ # Ensure new compiler is in there
+ new_compilers = set(spack.compilers.all_compilers())
+ new_compiler = new_compilers - old_compilers
+ assert new_compiler
+ c = new_compiler.pop()
+ assert c.version == Version(test_version)
diff --git a/lib/spack/spack/test/cmd/uninstall.py b/lib/spack/spack/test/cmd/uninstall.py
index 9fffaace40..bfbb9b8148 100644
--- a/lib/spack/spack/test/cmd/uninstall.py
+++ b/lib/spack/spack/test/cmd/uninstall.py
@@ -22,12 +22,13 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import spack.test.mock_database
-
-from spack.cmd.uninstall import uninstall
+import pytest
+import spack.store
+import spack.cmd.uninstall
class MockArgs(object):
+
def __init__(self, packages, all=False, force=False, dependents=False):
self.packages = packages
self.all = all
@@ -36,26 +37,28 @@ class MockArgs(object):
self.yes_to_all = True
-class TestUninstall(spack.test.mock_database.MockDatabase):
- def test_uninstall(self):
- parser = None
- # Multiple matches
- args = MockArgs(['mpileaks'])
- self.assertRaises(SystemExit, uninstall, parser, args)
- # Installed dependents
- args = MockArgs(['libelf'])
- self.assertRaises(SystemExit, uninstall, parser, args)
- # Recursive uninstall
- args = MockArgs(['callpath'], all=True, dependents=True)
+def test_uninstall(database):
+ parser = None
+ uninstall = spack.cmd.uninstall.uninstall
+ # Multiple matches
+ args = MockArgs(['mpileaks'])
+ with pytest.raises(SystemExit):
+ uninstall(parser, args)
+ # Installed dependents
+ args = MockArgs(['libelf'])
+ with pytest.raises(SystemExit):
uninstall(parser, args)
+ # Recursive uninstall
+ args = MockArgs(['callpath'], all=True, dependents=True)
+ uninstall(parser, args)
- all_specs = spack.install_layout.all_specs()
- self.assertEqual(len(all_specs), 7)
- # query specs with multiple configurations
- mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
- callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
- mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+ all_specs = spack.store.layout.all_specs()
+ assert len(all_specs) == 7
+ # query specs with multiple configurations
+ mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
+ callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
+ mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
- self.assertEqual(len(mpileaks_specs), 0)
- self.assertEqual(len(callpath_specs), 0)
- self.assertEqual(len(mpi_specs), 3)
+ assert len(mpileaks_specs) == 0
+ assert len(callpath_specs) == 0
+ assert len(mpi_specs) == 3
diff --git a/lib/spack/spack/test/concretize.py b/lib/spack/spack/test/concretize.py
index 799fdae3a9..b7cad503a3 100644
--- a/lib/spack/spack/test/concretize.py
+++ b/lib/spack/spack/test/concretize.py
@@ -22,222 +22,256 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import pytest
import spack
+import spack.architecture
+from spack.concretize import find_spec
from spack.spec import Spec, CompilerSpec
from spack.version import ver
-from spack.concretize import find_spec
-from spack.test.mock_packages_test import *
-
-class ConcretizeTest(MockPackagesTest):
-
- def check_spec(self, abstract, concrete):
- if abstract.versions.concrete:
- self.assertEqual(abstract.versions, concrete.versions)
-
- if abstract.variants:
- for name in abstract.variants:
- avariant = abstract.variants[name]
- cvariant = concrete.variants[name]
- self.assertEqual(avariant.enabled, cvariant.enabled)
-
- for name in abstract.package.variants:
- self.assertTrue(name in concrete.variants)
-
- if abstract.compiler and abstract.compiler.concrete:
- self.assertEqual(abstract.compiler, concrete.compiler)
-
- if abstract.architecture and abstract.architecture.concrete:
- self.assertEqual(abstract.architecture, concrete.architecture)
-
-
- def check_concretize(self, abstract_spec):
- abstract = Spec(abstract_spec)
- concrete = abstract.concretized()
-
- self.assertFalse(abstract.concrete)
- self.assertTrue(concrete.concrete)
- self.check_spec(abstract, concrete)
-
- return concrete
-
-
- def test_concretize_no_deps(self):
- self.check_concretize('libelf')
- self.check_concretize('libelf@0.8.13')
-
-
- def test_concretize_dag(self):
- self.check_concretize('callpath')
- self.check_concretize('mpileaks')
- self.check_concretize('libelf')
- def test_concretize_variant(self):
- self.check_concretize('mpich+debug')
- self.check_concretize('mpich~debug')
- self.check_concretize('mpich')
-
+def check_spec(abstract, concrete):
+ if abstract.versions.concrete:
+ assert abstract.versions == concrete.versions
+
+ if abstract.variants:
+ for name in abstract.variants:
+ avariant = abstract.variants[name]
+ cvariant = concrete.variants[name]
+ assert avariant.value == cvariant.value
+
+ if abstract.compiler_flags:
+ for flag in abstract.compiler_flags:
+ aflag = abstract.compiler_flags[flag]
+ cflag = concrete.compiler_flags[flag]
+ assert set(aflag) <= set(cflag)
+
+ for name in abstract.package.variants:
+ assert name in concrete.variants
+
+ for flag in concrete.compiler_flags.valid_compiler_flags():
+ assert flag in concrete.compiler_flags
+
+ if abstract.compiler and abstract.compiler.concrete:
+ assert abstract.compiler == concrete.compiler
+
+ if abstract.architecture and abstract.architecture.concrete:
+ assert abstract.architecture == concrete.architecture
+
+
+def check_concretize(abstract_spec):
+ abstract = Spec(abstract_spec)
+ concrete = abstract.concretized()
+ assert not abstract.concrete
+ assert concrete.concrete
+ check_spec(abstract, concrete)
+ return concrete
+
+
+@pytest.fixture(
+ params=[
+ # no_deps
+ 'libelf', 'libelf@0.8.13',
+ # dag
+ 'callpath', 'mpileaks', 'libelf',
+ # variant
+ 'mpich+debug', 'mpich~debug', 'mpich debug=2', 'mpich',
+ # compiler flags
+ 'mpich cppflags="-O3"',
+ # with virtual
+ 'mpileaks ^mpi', 'mpileaks ^mpi@:1.1', 'mpileaks ^mpi@2:',
+ 'mpileaks ^mpi@2.1', 'mpileaks ^mpi@2.2', 'mpileaks ^mpi@2.2',
+ 'mpileaks ^mpi@:1', 'mpileaks ^mpi@1.2:2'
+ ]
+)
+def spec(request):
+ """Spec to be concretized"""
+ return request.param
+
+
+@pytest.mark.usefixtures('config', 'builtin_mock')
+class TestConcretize(object):
+ def test_concretize(self, spec):
+ check_concretize(spec)
+
+ def test_concretize_mention_build_dep(self):
+ spec = check_concretize('cmake-client ^cmake@3.4.3')
+ # Check parent's perspective of child
+ dependency = spec.dependencies_dict()['cmake']
+ assert set(dependency.deptypes) == set(['build'])
+ # Check child's perspective of parent
+ cmake = spec['cmake']
+ dependent = cmake.dependents_dict()['cmake-client']
+ assert set(dependent.deptypes) == set(['build'])
def test_concretize_preferred_version(self):
- spec = self.check_concretize('python')
- self.assertEqual(spec.versions, ver('2.7.11'))
-
- spec = self.check_concretize('python@3.5.1')
- self.assertEqual(spec.versions, ver('3.5.1'))
-
-
- def test_concretize_with_virtual(self):
- self.check_concretize('mpileaks ^mpi')
- self.check_concretize('mpileaks ^mpi@:1.1')
- self.check_concretize('mpileaks ^mpi@2:')
- self.check_concretize('mpileaks ^mpi@2.1')
- self.check_concretize('mpileaks ^mpi@2.2')
- self.check_concretize('mpileaks ^mpi@2.2')
- self.check_concretize('mpileaks ^mpi@:1')
- self.check_concretize('mpileaks ^mpi@1.2:2')
-
+ spec = check_concretize('python')
+ assert spec.versions == ver('2.7.11')
+ spec = check_concretize('python@3.5.1')
+ assert spec.versions == ver('3.5.1')
def test_concretize_with_restricted_virtual(self):
- self.check_concretize('mpileaks ^mpich2')
-
- concrete = self.check_concretize('mpileaks ^mpich2@1.1')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.1'))
+ check_concretize('mpileaks ^mpich2')
- concrete = self.check_concretize('mpileaks ^mpich2@1.2')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.2'))
+ concrete = check_concretize('mpileaks ^mpich2@1.1')
+ assert concrete['mpich2'].satisfies('mpich2@1.1')
- concrete = self.check_concretize('mpileaks ^mpich2@:1.5')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.5'))
+ concrete = check_concretize('mpileaks ^mpich2@1.2')
+ assert concrete['mpich2'].satisfies('mpich2@1.2')
- concrete = self.check_concretize('mpileaks ^mpich2@:1.3')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.3'))
+ concrete = check_concretize('mpileaks ^mpich2@:1.5')
+ assert concrete['mpich2'].satisfies('mpich2@:1.5')
- concrete = self.check_concretize('mpileaks ^mpich2@:1.2')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.2'))
+ concrete = check_concretize('mpileaks ^mpich2@:1.3')
+ assert concrete['mpich2'].satisfies('mpich2@:1.3')
- concrete = self.check_concretize('mpileaks ^mpich2@:1.1')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@:1.1'))
+ concrete = check_concretize('mpileaks ^mpich2@:1.2')
+ assert concrete['mpich2'].satisfies('mpich2@:1.2')
- concrete = self.check_concretize('mpileaks ^mpich2@1.1:')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.1:'))
+ concrete = check_concretize('mpileaks ^mpich2@:1.1')
+ assert concrete['mpich2'].satisfies('mpich2@:1.1')
- concrete = self.check_concretize('mpileaks ^mpich2@1.5:')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.5:'))
+ concrete = check_concretize('mpileaks ^mpich2@1.1:')
+ assert concrete['mpich2'].satisfies('mpich2@1.1:')
- concrete = self.check_concretize('mpileaks ^mpich2@1.3.1:1.4')
- self.assertTrue(concrete['mpich2'].satisfies('mpich2@1.3.1:1.4'))
+ concrete = check_concretize('mpileaks ^mpich2@1.5:')
+ assert concrete['mpich2'].satisfies('mpich2@1.5:')
+ concrete = check_concretize('mpileaks ^mpich2@1.3.1:1.4')
+ assert concrete['mpich2'].satisfies('mpich2@1.3.1:1.4')
def test_concretize_with_provides_when(self):
"""Make sure insufficient versions of MPI are not in providers list when
- we ask for some advanced version.
+ we ask for some advanced version.
"""
- self.assertTrue(not any(spec.satisfies('mpich2@:1.0')
- for spec in spack.repo.providers_for('mpi@2.1')))
-
- self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
- for spec in spack.repo.providers_for('mpi@2.2')))
-
- self.assertTrue(not any(spec.satisfies('mpich2@:1.1')
- for spec in spack.repo.providers_for('mpi@2.2')))
-
- self.assertTrue(not any(spec.satisfies('mpich@:1')
- for spec in spack.repo.providers_for('mpi@2')))
-
- self.assertTrue(not any(spec.satisfies('mpich@:1')
- for spec in spack.repo.providers_for('mpi@3')))
-
- self.assertTrue(not any(spec.satisfies('mpich2')
- for spec in spack.repo.providers_for('mpi@3')))
-
+ repo = spack.repo
+ assert not any(
+ s.satisfies('mpich2@:1.0') for s in repo.providers_for('mpi@2.1')
+ )
+ assert not any(
+ s.satisfies('mpich2@:1.1') for s in repo.providers_for('mpi@2.2')
+ )
+ assert not any(
+ s.satisfies('mpich@:1') for s in repo.providers_for('mpi@2')
+ )
+ assert not any(
+ s.satisfies('mpich@:1') for s in repo.providers_for('mpi@3')
+ )
+ assert not any(
+ s.satisfies('mpich2') for s in repo.providers_for('mpi@3')
+ )
+
+    def test_provides_handles_multiple_providers_of_same_version(self):
+        """Check that every provider of the same MPI version is listed."""
+ providers = spack.repo.providers_for('mpi@3.0')
+
+ # Note that providers are repo-specific, so we don't misinterpret
+ # providers, but vdeps are not namespace-specific, so we can
+ # associate vdeps across repos.
+ assert Spec('builtin.mock.multi-provider-mpi@1.10.3') in providers
+ assert Spec('builtin.mock.multi-provider-mpi@1.10.2') in providers
+ assert Spec('builtin.mock.multi-provider-mpi@1.10.1') in providers
+ assert Spec('builtin.mock.multi-provider-mpi@1.10.0') in providers
+ assert Spec('builtin.mock.multi-provider-mpi@1.8.8') in providers
+
+ def concretize_multi_provider(self):
+ s = Spec('mpileaks ^multi-provider-mpi@3.0')
+ s.concretize()
+ assert s['mpi'].version == ver('1.10.3')
def test_concretize_two_virtuals(self):
"""Test a package with multiple virtual dependencies."""
- s = Spec('hypre').concretize()
-
+ Spec('hypre').concretize()
- def test_concretize_two_virtuals_with_one_bound(self):
+ def test_concretize_two_virtuals_with_one_bound(
+ self, refresh_builtin_mock
+ ):
"""Test a package with multiple virtual dependencies and one preset."""
- s = Spec('hypre ^openblas').concretize()
-
+ Spec('hypre ^openblas').concretize()
def test_concretize_two_virtuals_with_two_bound(self):
- """Test a package with multiple virtual dependencies and two of them preset."""
- s = Spec('hypre ^openblas ^netlib-lapack').concretize()
-
+ """Test a package with multiple virtual deps and two of them preset."""
+ Spec('hypre ^openblas ^netlib-lapack').concretize()
def test_concretize_two_virtuals_with_dual_provider(self):
"""Test a package with multiple virtual dependencies and force a provider
- that provides both."""
- s = Spec('hypre ^openblas-with-lapack').concretize()
-
-
- def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(self):
- """Test a package with multiple virtual dependencies and force a provider
- that provides both, and another conflicting package that provides one."""
+ that provides both.
+ """
+ Spec('hypre ^openblas-with-lapack').concretize()
+
+ def test_concretize_two_virtuals_with_dual_provider_and_a_conflict(
+ self
+ ):
+ """Test a package with multiple virtual dependencies and force a
+ provider that provides both, and another conflicting package that
+ provides one.
+ """
s = Spec('hypre ^openblas-with-lapack ^netlib-lapack')
- self.assertRaises(spack.spec.MultipleProviderError, s.concretize)
-
+ with pytest.raises(spack.spec.MultipleProviderError):
+ s.concretize()
def test_virtual_is_fully_expanded_for_callpath(self):
# force dependence on fake "zmpi" by asking for MPI 10.0
spec = Spec('callpath ^mpi@10.0')
- self.assertTrue('mpi' in spec.dependencies)
- self.assertFalse('fake' in spec)
-
+ assert 'mpi' in spec._dependencies
+ assert 'fake' not in spec
spec.concretize()
-
- self.assertTrue('zmpi' in spec.dependencies)
- self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
- self.assertTrue('zmpi' in spec)
- self.assertTrue('mpi' in spec)
-
- self.assertTrue('fake' in spec.dependencies['zmpi'])
-
-
- def test_virtual_is_fully_expanded_for_mpileaks(self):
+ assert 'zmpi' in spec._dependencies
+ assert all('mpi' not in d._dependencies for d in spec.traverse())
+ assert 'zmpi' in spec
+ assert 'mpi' in spec
+ assert 'fake' in spec._dependencies['zmpi'].spec
+
+ def test_virtual_is_fully_expanded_for_mpileaks(
+ self
+ ):
spec = Spec('mpileaks ^mpi@10.0')
- self.assertTrue('mpi' in spec.dependencies)
- self.assertFalse('fake' in spec)
-
+ assert 'mpi' in spec._dependencies
+ assert 'fake' not in spec
spec.concretize()
-
- self.assertTrue('zmpi' in spec.dependencies)
- self.assertTrue('callpath' in spec.dependencies)
- self.assertTrue('zmpi' in spec.dependencies['callpath'].dependencies)
- self.assertTrue('fake' in spec.dependencies['callpath'].dependencies['zmpi'].dependencies)
-
- self.assertTrue(all(not 'mpi' in d.dependencies for d in spec.traverse()))
- self.assertTrue('zmpi' in spec)
- self.assertTrue('mpi' in spec)
-
+ assert 'zmpi' in spec._dependencies
+ assert 'callpath' in spec._dependencies
+ assert 'zmpi' in spec._dependencies['callpath'].spec._dependencies
+ assert 'fake' in spec._dependencies['callpath'].spec._dependencies[
+ 'zmpi'].spec._dependencies # NOQA: ignore=E501
+ assert all('mpi' not in d._dependencies for d in spec.traverse())
+ assert 'zmpi' in spec
+ assert 'mpi' in spec
def test_my_dep_depends_on_provider_of_my_virtual_dep(self):
- spec = Spec('indirect_mpich')
+ spec = Spec('indirect-mpich')
spec.normalize()
spec.concretize()
-
def test_compiler_inheritance(self):
spec = Spec('mpileaks')
spec.normalize()
-
spec['dyninst'].compiler = CompilerSpec('clang')
spec.concretize()
-
# TODO: not exactly the syntax I would like.
- self.assertTrue(spec['libdwarf'].compiler.satisfies('clang'))
- self.assertTrue(spec['libelf'].compiler.satisfies('clang'))
-
+ assert spec['libdwarf'].compiler.satisfies('clang')
+ assert spec['libelf'].compiler.satisfies('clang')
def test_external_package(self):
- spec = Spec('externaltool')
+ spec = Spec('externaltool%gcc')
spec.concretize()
-
- self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
- self.assertFalse('externalprereq' in spec)
- self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
-
+ assert spec['externaltool'].external == '/path/to/external_tool'
+ assert 'externalprereq' not in spec
+ assert spec['externaltool'].compiler.satisfies('gcc')
+
+ def test_external_package_module(self):
+ # No tcl modules on darwin/linux machines
+ # TODO: improved way to check for this.
+ platform = spack.architecture.real_platform().name
+ if platform == 'darwin' or platform == 'linux':
+ return
+
+ spec = Spec('externalmodule')
+ spec.concretize()
+ assert spec['externalmodule'].external_module == 'external-module'
+ assert 'externalprereq' not in spec
+ assert spec['externalmodule'].compiler.satisfies('gcc')
def test_nobuild_package(self):
got_error = False
@@ -246,17 +280,15 @@ class ConcretizeTest(MockPackagesTest):
spec.concretize()
except spack.concretize.NoBuildError:
got_error = True
- self.assertTrue(got_error)
-
+ assert got_error
def test_external_and_virtual(self):
spec = Spec('externaltest')
spec.concretize()
- self.assertEqual(spec['externaltool'].external, '/path/to/external_tool')
- self.assertEqual(spec['stuff'].external, '/path/to/external_virtual_gcc')
- self.assertTrue(spec['externaltool'].compiler.satisfies('gcc'))
- self.assertTrue(spec['stuff'].compiler.satisfies('gcc'))
-
+ assert spec['externaltool'].external == '/path/to/external_tool'
+ assert spec['stuff'].external == '/path/to/external_virtual_gcc'
+ assert spec['externaltool'].compiler.satisfies('gcc')
+ assert spec['stuff'].compiler.satisfies('gcc')
def test_find_spec_parents(self):
"""Tests the spec finding logic used by concretization. """
@@ -266,8 +298,7 @@ class ConcretizeTest(MockPackagesTest):
Spec('d +foo')),
Spec('e +foo'))
- self.assertEqual('a', find_spec(s['b'], lambda s: '+foo' in s).name)
-
+ assert 'a' == find_spec(s['b'], lambda s: '+foo' in s).name
def test_find_spec_children(self):
s = Spec('a',
@@ -275,14 +306,13 @@ class ConcretizeTest(MockPackagesTest):
Spec('c'),
Spec('d +foo')),
Spec('e +foo'))
- self.assertEqual('d', find_spec(s['b'], lambda s: '+foo' in s).name)
+ assert 'd' == find_spec(s['b'], lambda s: '+foo' in s).name
s = Spec('a',
Spec('b +foo',
Spec('c +foo'),
Spec('d')),
Spec('e +foo'))
- self.assertEqual('c', find_spec(s['b'], lambda s: '+foo' in s).name)
-
+ assert 'c' == find_spec(s['b'], lambda s: '+foo' in s).name
def test_find_spec_sibling(self):
s = Spec('a',
@@ -290,8 +320,8 @@ class ConcretizeTest(MockPackagesTest):
Spec('c'),
Spec('d')),
Spec('e +foo'))
- self.assertEqual('e', find_spec(s['b'], lambda s: '+foo' in s).name)
- self.assertEqual('b', find_spec(s['e'], lambda s: '+foo' in s).name)
+ assert 'e' == find_spec(s['b'], lambda s: '+foo' in s).name
+ assert 'b' == find_spec(s['e'], lambda s: '+foo' in s).name
s = Spec('a',
Spec('b +foo',
@@ -299,8 +329,7 @@ class ConcretizeTest(MockPackagesTest):
Spec('d')),
Spec('e',
Spec('f +foo')))
- self.assertEqual('f', find_spec(s['b'], lambda s: '+foo' in s).name)
-
+ assert 'f' == find_spec(s['b'], lambda s: '+foo' in s).name
def test_find_spec_self(self):
s = Spec('a',
@@ -308,8 +337,7 @@ class ConcretizeTest(MockPackagesTest):
Spec('c'),
Spec('d')),
Spec('e'))
- self.assertEqual('b', find_spec(s['b'], lambda s: '+foo' in s).name)
-
+ assert 'b' == find_spec(s['b'], lambda s: '+foo' in s).name
def test_find_spec_none(self):
s = Spec('a',
@@ -317,11 +345,10 @@ class ConcretizeTest(MockPackagesTest):
Spec('c'),
Spec('d')),
Spec('e'))
- self.assertEqual(None, find_spec(s['b'], lambda s: '+foo' in s))
-
+ assert find_spec(s['b'], lambda s: '+foo' in s) is None
def test_compiler_child(self):
s = Spec('mpileaks%clang ^dyninst%gcc')
s.concretize()
- self.assertTrue(s['mpileaks'].satisfies('%clang'))
- self.assertTrue(s['dyninst'].satisfies('%gcc'))
+ assert s['mpileaks'].satisfies('%clang')
+ assert s['dyninst'].satisfies('%gcc')
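The hunks above drop the unittest-style self.assert* helpers in favor of bare assert statements; pytest's assertion rewriting still reports both operands on failure. A minimal sketch of the resulting style (hypothetical test, not part of this change):

    # Under pytest a bare assert is enough; the failing values appear in the report.
    def test_bare_assert_style():
        preferred = {'debug': False, 'shared': True}  # hypothetical sample data
        assert preferred['shared'] is True
        assert 'opt' not in preferred
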
diff --git a/lib/spack/spack/test/concretize_preferences.py b/lib/spack/spack/test/concretize_preferences.py
new file mode 100644
index 0000000000..54df4e1563
--- /dev/null
+++ b/lib/spack/spack/test/concretize_preferences.py
@@ -0,0 +1,173 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import pytest
+
+import spack
+import spack.util.spack_yaml as syaml
+from spack.spec import Spec
+from spack.package_prefs import PreferredPackages
+
+
+@pytest.fixture()
+def concretize_scope(config, tmpdir):
+ """Adds a scope for concretization preferences"""
+ tmpdir.ensure_dir('concretize')
+ spack.config.ConfigScope(
+ 'concretize', str(tmpdir.join('concretize'))
+ )
+ yield
+ # This is kind of weird, but that's how config scopes are
+ # set in ConfigScope.__init__
+ spack.config.config_scopes.pop('concretize')
+ spack.package_prefs._pkgsort = PreferredPackages()
+
+ # reset provider index each time, too
+ spack.repo._provider_index = None
+
+
+def concretize(abstract_spec):
+ return Spec(abstract_spec).concretized()
+
+
+def update_packages(pkgname, section, value):
+ """Update config and reread package list"""
+ conf = {pkgname: {section: value}}
+ spack.config.update_config('packages', conf, 'concretize')
+ spack.package_prefs._pkgsort = PreferredPackages()
+
+
+def assert_variant_values(spec, **variants):
+ concrete = concretize(spec)
+ for variant, value in variants.items():
+ assert concrete.variants[variant].value == value
+
+
+@pytest.mark.usefixtures('concretize_scope', 'builtin_mock')
+class TestConcretizePreferences(object):
+ def test_preferred_variants(self):
+ """Test preferred variants are applied correctly
+ """
+ update_packages('mpileaks', 'variants', '~debug~opt+shared+static')
+ assert_variant_values(
+ 'mpileaks', debug=False, opt=False, shared=True, static=True
+ )
+ update_packages(
+ 'mpileaks', 'variants', ['+debug', '+opt', '~shared', '-static']
+ )
+ assert_variant_values(
+ 'mpileaks', debug=True, opt=True, shared=False, static=False
+ )
+
+ def test_preferred_compilers(self, refresh_builtin_mock):
+ """Test preferred compilers are applied correctly
+ """
+ update_packages('mpileaks', 'compiler', ['clang@3.3'])
+ spec = concretize('mpileaks')
+ assert spec.compiler == spack.spec.CompilerSpec('clang@3.3')
+
+ update_packages('mpileaks', 'compiler', ['gcc@4.5.0'])
+ spec = concretize('mpileaks')
+ assert spec.compiler == spack.spec.CompilerSpec('gcc@4.5.0')
+
+ def test_preferred_versions(self):
+ """Test preferred package versions are applied correctly
+ """
+ update_packages('mpileaks', 'version', ['2.3'])
+ spec = concretize('mpileaks')
+ assert spec.version == spack.spec.Version('2.3')
+
+ update_packages('mpileaks', 'version', ['2.2'])
+ spec = concretize('mpileaks')
+ assert spec.version == spack.spec.Version('2.2')
+
+ def test_preferred_providers(self):
+ """Test preferred providers of virtual packages are
+ applied correctly
+ """
+ update_packages('all', 'providers', {'mpi': ['mpich']})
+ spec = concretize('mpileaks')
+ assert 'mpich' in spec
+
+ update_packages('all', 'providers', {'mpi': ['zmpi']})
+ spec = concretize('mpileaks')
+ assert 'zmpi' in spec
+
+ def test_develop(self):
+ """Test concretization with develop version"""
+ spec = Spec('builtin.mock.develop-test')
+ spec.concretize()
+ assert spec.version == spack.spec.Version('0.2.15')
+
+ def test_no_virtuals_in_packages_yaml(self):
+ """Verify that virtuals are not allowed in packages.yaml."""
+
+ # set up a packages.yaml file with a vdep as a key. We use
+ # syaml.load here to make sure source lines in the config are
+ # attached to parsed strings, as the error message uses them.
+ conf = syaml.load("""\
+mpi:
+ paths:
+ mpi-with-lapack@2.1: /path/to/lapack
+""")
+ spack.config.update_config('packages', conf, 'concretize')
+
+ # now when we get the packages.yaml config, there should be an error
+ with pytest.raises(spack.package_prefs.VirtualInPackagesYAMLError):
+ spack.package_prefs.get_packages_config()
+
+ def test_all_is_not_a_virtual(self):
+ """Verify that `all` is allowed in packages.yaml."""
+ conf = syaml.load("""\
+all:
+ variants: [+mpi]
+""")
+ spack.config.update_config('packages', conf, 'concretize')
+
+ # should be no error for 'all':
+ spack.package_prefs._pkgsort = PreferredPackages()
+ spack.package_prefs.get_packages_config()
+
+ def test_external_mpi(self):
+ # make sure this doesn't give us an external first.
+ spec = Spec('mpi')
+ spec.concretize()
+ assert not spec['mpi'].external
+
+ # load config
+ conf = syaml.load("""\
+all:
+ providers:
+ mpi: [mpich]
+mpich:
+ buildable: false
+ paths:
+ mpich@3.0.4: /dummy/path
+""")
+ spack.config.update_config('packages', conf, 'concretize')
+
+ # ensure that once config is in place, external is used
+ spec = Spec('mpi')
+ spec.concretize()
+ assert spec['mpich'].external == '/dummy/path'
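The tests in this new file drive preferences exclusively through the update_packages helper defined above, which nests its arguments into a packages-config dictionary before writing it to the temporary 'concretize' scope. A rough sketch of the equivalent direct calls, assuming the scope created by the concretize_scope fixture:

    import spack.config
    import spack.package_prefs

    # update_packages('mpileaks', 'version', ['2.3']) builds this dictionary,
    # writes it into the 'concretize' scope, and resets the preference cache.
    conf = {'mpileaks': {'version': ['2.3']}}
    spack.config.update_config('packages', conf, 'concretize')
    spack.package_prefs._pkgsort = spack.package_prefs.PreferredPackages()
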
diff --git a/lib/spack/spack/test/config.py b/lib/spack/spack/test/config.py
index eff482f4c6..ed8f78ceb4 100644
--- a/lib/spack/spack/test/config.py
+++ b/lib/spack/spack/test/config.py
@@ -22,118 +22,350 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import collections
+import getpass
import os
-import shutil
-from tempfile import mkdtemp
+import tempfile
+import ordereddict_backport
+import pytest
import spack
import spack.config
-from ordereddict_backport import OrderedDict
-from spack.test.mock_packages_test import *
+import yaml
+from spack.util.path import canonicalize_path
# Some sample compiler config data
-a_comps = {
- "x86_64_E5v2_IntelIB": {
- "gcc@4.7.3" : {
- "cc" : "/gcc473",
- "cxx": "/g++473",
- "f77": None,
- "fc" : None },
- "gcc@4.5.0" : {
- "cc" : "/gcc450",
- "cxx": "/g++450",
- "f77": "/gfortran",
- "fc" : "/gfortran" },
- "clang@3.3" : {
- "cc" : "<overwritten>",
- "cxx": "<overwritten>",
- "f77": "<overwritten>",
- "fc" : "<overwritten>" }
- }
+a_comps = {
+ 'compilers': [
+ {'compiler': {
+ 'paths': {
+ "cc": "/gcc473",
+ "cxx": "/g++473",
+ "f77": None,
+ "fc": None
+ },
+ 'modules': None,
+ 'spec': 'gcc@4.7.3',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "/gcc450",
+ "cxx": "/g++450",
+ "f77": 'gfortran',
+ "fc": 'gfortran'
+ },
+ 'modules': None,
+ 'spec': 'gcc@4.5.0',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "/gcc422",
+ "cxx": "/g++422",
+ "f77": 'gfortran',
+ "fc": 'gfortran'
+ },
+ 'flags': {
+ "cppflags": "-O0 -fpic",
+ "fflags": "-f77",
+ },
+ 'modules': None,
+ 'spec': 'gcc@4.2.2',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "<overwritten>",
+ "cxx": "<overwritten>",
+ "f77": '<overwritten>',
+ "fc": '<overwritten>'},
+ 'modules': None,
+ 'spec': 'clang@3.3',
+ 'operating_system': 'CNL10'
+ }}
+ ]
}
b_comps = {
- "x86_64_E5v3": {
- "icc@10.0" : {
- "cc" : "/icc100",
- "cxx": "/icc100",
- "f77": None,
- "fc" : None },
- "icc@11.1" : {
- "cc" : "/icc111",
- "cxx": "/icp111",
- "f77": "/ifort",
- "fc" : "/ifort" },
- "clang@3.3" : {
- "cc" : "/clang",
- "cxx": "/clang++",
- "f77": None,
- "fc" : None}
- }
+ 'compilers': [
+ {'compiler': {
+ 'paths': {
+ "cc": "/icc100",
+ "cxx": "/icp100",
+ "f77": None,
+ "fc": None
+ },
+ 'modules': None,
+ 'spec': 'icc@10.0',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "/icc111",
+ "cxx": "/icp111",
+ "f77": 'ifort',
+ "fc": 'ifort'
+ },
+ 'modules': None,
+ 'spec': 'icc@11.1',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "/icc123",
+ "cxx": "/icp123",
+ "f77": 'ifort',
+ "fc": 'ifort'
+ },
+ 'flags': {
+ "cppflags": "-O3",
+ "fflags": "-f77rtl",
+ },
+ 'modules': None,
+ 'spec': 'icc@12.3',
+ 'operating_system': 'CNL10'
+ }},
+ {'compiler': {
+ 'paths': {
+ "cc": "<overwritten>",
+ "cxx": "<overwritten>",
+ "f77": '<overwritten>',
+ "fc": '<overwritten>'},
+ 'modules': None,
+ 'spec': 'clang@3.3',
+ 'operating_system': 'CNL10'
+ }}
+ ]
}
# Some Sample repo data
-repos_low = [ "/some/path" ]
-repos_high = [ "/some/other/path" ]
-
-class ConfigTest(MockPackagesTest):
-
- def setUp(self):
- super(ConfigTest, self).setUp()
- self.tmp_dir = mkdtemp('.tmp', 'spack-config-test-')
- spack.config.config_scopes = OrderedDict()
- spack.config.ConfigScope('test_low_priority', os.path.join(self.tmp_dir, 'low'))
- spack.config.ConfigScope('test_high_priority', os.path.join(self.tmp_dir, 'high'))
-
- def tearDown(self):
- super(ConfigTest, self).tearDown()
- shutil.rmtree(self.tmp_dir, True)
-
- def check_config(self, comps, arch, *compiler_names):
- """Check that named compilers in comps match Spack's config."""
- config = spack.config.get_config('compilers')
- compiler_list = ['cc', 'cxx', 'f77', 'fc']
- for key in compiler_names:
+repos_low = {'repos': ["/some/path"]}
+repos_high = {'repos': ["/some/other/path"]}
+
+
+# sample config data
+config_low = {
+ 'config': {
+ 'install_tree': 'install_tree_path',
+ 'build_stage': ['path1', 'path2', 'path3']}}
+
+config_override_all = {
+ 'config:': {
+ 'install_tree:': 'override_all'}}
+
+config_override_key = {
+ 'config': {
+ 'install_tree:': 'override_key'}}
+
+config_merge_list = {
+ 'config': {
+ 'build_stage': ['patha', 'pathb']}}
+
+config_override_list = {
+ 'config': {
+ 'build_stage:': ['patha', 'pathb']}}
+
+
+def check_compiler_config(comps, *compiler_names):
+ """Check that named compilers in comps match Spack's config."""
+ config = spack.config.get_config('compilers')
+ compiler_list = ['cc', 'cxx', 'f77', 'fc']
+ flag_list = ['cflags', 'cxxflags', 'fflags', 'cppflags',
+ 'ldflags', 'ldlibs']
+ param_list = ['modules', 'paths', 'spec', 'operating_system']
+ for compiler in config:
+ conf = compiler['compiler']
+ if conf['spec'] in compiler_names:
+ comp = next((c['compiler'] for c in comps if
+ c['compiler']['spec'] == conf['spec']), None)
+ if not comp:
+ raise ValueError('Bad config spec')
+ for p in param_list:
+ assert conf[p] == comp[p]
+ for f in flag_list:
+ expected = comp.get('flags', {}).get(f, None)
+ actual = conf.get('flags', {}).get(f, None)
+ assert expected == actual
for c in compiler_list:
- expected = comps[arch][key][c]
- actual = config[arch][key][c]
- self.assertEqual(expected, actual)
+ expected = comp['paths'][c]
+ actual = conf['paths'][c]
+ assert expected == actual
+
+
+@pytest.fixture()
+def config(tmpdir):
+ """Mocks the configuration scope."""
+ spack.config.clear_config_caches()
+ real_scope = spack.config.config_scopes
+ spack.config.config_scopes = ordereddict_backport.OrderedDict()
+ for priority in ['low', 'high']:
+ spack.config.ConfigScope(priority, str(tmpdir.join(priority)))
+ Config = collections.namedtuple('Config', ['real', 'mock'])
+ yield Config(real=real_scope, mock=spack.config.config_scopes)
+ spack.config.config_scopes = real_scope
+ spack.config.clear_config_caches()
+
+
+@pytest.fixture()
+def write_config_file(tmpdir):
+ """Returns a function that writes a config file."""
+ def _write(config, data, scope):
+ config_yaml = tmpdir.join(scope, config + '.yaml')
+ config_yaml.ensure()
+ with config_yaml.open('w') as f:
+ yaml.dump(data, f)
+ return _write
+
+
+@pytest.fixture()
+def compiler_specs():
+ """Returns a couple of compiler specs needed for the tests"""
+ a = [ac['compiler']['spec'] for ac in a_comps['compilers']]
+ b = [bc['compiler']['spec'] for bc in b_comps['compilers']]
+ CompilerSpecs = collections.namedtuple('CompilerSpecs', ['a', 'b'])
+ return CompilerSpecs(a=a, b=b)
+
+
+@pytest.mark.usefixtures('config')
+class TestConfig(object):
def test_write_list_in_memory(self):
- spack.config.update_config('repos', repos_low, 'test_low_priority')
- spack.config.update_config('repos', repos_high, 'test_high_priority')
+ spack.config.update_config('repos', repos_low['repos'], scope='low')
+ spack.config.update_config('repos', repos_high['repos'], scope='high')
+
config = spack.config.get_config('repos')
- self.assertEqual(config, repos_high+repos_low)
+ assert config == repos_high['repos'] + repos_low['repos']
- def test_write_key_in_memory(self):
+ def test_write_key_in_memory(self, compiler_specs):
# Write b_comps "on top of" a_comps.
- spack.config.update_config('compilers', a_comps, 'test_low_priority')
- spack.config.update_config('compilers', b_comps, 'test_high_priority')
-
+ spack.config.update_config(
+ 'compilers', a_comps['compilers'], scope='low'
+ )
+ spack.config.update_config(
+ 'compilers', b_comps['compilers'], scope='high'
+ )
# Make sure the config looks how we expect.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ check_compiler_config(a_comps['compilers'], *compiler_specs.a)
+ check_compiler_config(b_comps['compilers'], *compiler_specs.b)
- def test_write_key_to_disk(self):
+ def test_write_key_to_disk(self, compiler_specs):
# Write b_comps "on top of" a_comps.
- spack.config.update_config('compilers', a_comps, 'test_low_priority')
- spack.config.update_config('compilers', b_comps, 'test_high_priority')
-
+ spack.config.update_config(
+ 'compilers', a_comps['compilers'], scope='low'
+ )
+ spack.config.update_config(
+ 'compilers', b_comps['compilers'], scope='high'
+ )
# Clear caches so we're forced to read from disk.
spack.config.clear_config_caches()
-
# Same check again, to ensure consistency.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ check_compiler_config(a_comps['compilers'], *compiler_specs.a)
+ check_compiler_config(b_comps['compilers'], *compiler_specs.b)
- def test_write_to_same_priority_file(self):
+ def test_write_to_same_priority_file(self, compiler_specs):
# Write b_comps in the same file as a_comps.
- spack.config.update_config('compilers', a_comps, 'test_low_priority')
- spack.config.update_config('compilers', b_comps, 'test_low_priority')
-
+ spack.config.update_config(
+ 'compilers', a_comps['compilers'], scope='low'
+ )
+ spack.config.update_config(
+ 'compilers', b_comps['compilers'], scope='low'
+ )
# Clear caches so we're forced to read from disk.
spack.config.clear_config_caches()
-
# Same check again, to ensure consistency.
- self.check_config(a_comps, 'x86_64_E5v2_IntelIB', 'gcc@4.7.3', 'gcc@4.5.0')
- self.check_config(b_comps, 'x86_64_E5v3', 'icc@10.0', 'icc@11.1', 'clang@3.3')
+ check_compiler_config(a_comps['compilers'], *compiler_specs.a)
+ check_compiler_config(b_comps['compilers'], *compiler_specs.b)
+
+ def check_canonical(self, var, expected):
+ """Ensure that <expected> is substituted properly for <var> in strings
+ containing <var> in various positions."""
+ path = '/foo/bar/baz'
+
+ assert canonicalize_path(var + path) == expected + path
+
+ assert canonicalize_path(path + var) == path + '/' + expected
+
+ assert canonicalize_path(path + var + path) == expected + path
+
+ def test_substitute_config_variables(self):
+ prefix = spack.prefix.lstrip('/')
+
+ assert os.path.join(
+ '/foo/bar/baz', prefix
+ ) == canonicalize_path('/foo/bar/baz/$spack')
+
+ assert os.path.join(
+ spack.prefix, 'foo/bar/baz'
+ ) == canonicalize_path('$spack/foo/bar/baz/')
+
+ assert os.path.join(
+ '/foo/bar/baz', prefix, 'foo/bar/baz'
+ ) == canonicalize_path('/foo/bar/baz/$spack/foo/bar/baz/')
+
+ assert os.path.join(
+ '/foo/bar/baz', prefix
+ ) == canonicalize_path('/foo/bar/baz/${spack}')
+
+ assert os.path.join(
+ spack.prefix, 'foo/bar/baz'
+ ) == canonicalize_path('${spack}/foo/bar/baz/')
+
+ assert os.path.join(
+ '/foo/bar/baz', prefix, 'foo/bar/baz'
+ ) == canonicalize_path('/foo/bar/baz/${spack}/foo/bar/baz/')
+
+ assert os.path.join(
+ '/foo/bar/baz', prefix, 'foo/bar/baz'
+ ) != canonicalize_path('/foo/bar/baz/${spack/foo/bar/baz/')
+
+ def test_substitute_user(self):
+ user = getpass.getuser()
+ assert '/foo/bar/' + user + '/baz' == canonicalize_path(
+ '/foo/bar/$user/baz'
+ )
+
+ def test_substitute_tempdir(self):
+ tempdir = tempfile.gettempdir()
+ assert tempdir == canonicalize_path('$tempdir')
+ assert tempdir + '/foo/bar/baz' == canonicalize_path(
+ '$tempdir/foo/bar/baz'
+ )
+
+ def test_read_config(self, write_config_file):
+ write_config_file('config', config_low, 'low')
+ assert spack.config.get_config('config') == config_low['config']
+
+ def test_read_config_override_all(self, write_config_file):
+ write_config_file('config', config_low, 'low')
+ write_config_file('config', config_override_all, 'high')
+ assert spack.config.get_config('config') == {
+ 'install_tree': 'override_all'
+ }
+
+ def test_read_config_override_key(self, write_config_file):
+ write_config_file('config', config_low, 'low')
+ write_config_file('config', config_override_key, 'high')
+ assert spack.config.get_config('config') == {
+ 'install_tree': 'override_key',
+ 'build_stage': ['path1', 'path2', 'path3']
+ }
+
+ def test_read_config_merge_list(self, write_config_file):
+ write_config_file('config', config_low, 'low')
+ write_config_file('config', config_merge_list, 'high')
+ assert spack.config.get_config('config') == {
+ 'install_tree': 'install_tree_path',
+ 'build_stage': ['patha', 'pathb', 'path1', 'path2', 'path3']
+ }
+
+ def test_read_config_override_list(self, write_config_file):
+ write_config_file('config', config_low, 'low')
+ write_config_file('config', config_override_list, 'high')
+ assert spack.config.get_config('config') == {
+ 'install_tree': 'install_tree_path',
+ 'build_stage': ['patha', 'pathb']
+ }
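The new config tests above also pin down the scope-merging rules: by default dictionaries and lists from different scopes are merged, while writing a key with a trailing colon ('config:', 'install_tree:', 'build_stage:') makes the higher-priority scope override the lower one outright. A minimal sketch of the two payload shapes, mirroring config_merge_list and config_override_list:

    # Same data, different merge behavior when placed in a higher scope:
    merge_payload = {'config': {'build_stage': ['patha', 'pathb']}}      # prepended to the lower scope's list
    override_payload = {'config': {'build_stage:': ['patha', 'pathb']}}  # replaces the lower scope's list
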
diff --git a/lib/spack/spack/test/configure_guess.py b/lib/spack/spack/test/configure_guess.py
deleted file mode 100644
index bad3673e7a..0000000000
--- a/lib/spack/spack/test/configure_guess.py
+++ /dev/null
@@ -1,83 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import os
-import shutil
-import tempfile
-import unittest
-
-from llnl.util.filesystem import *
-from spack.cmd.create import ConfigureGuesser
-from spack.stage import Stage
-from spack.test.mock_packages_test import *
-from spack.util.executable import which
-
-
-class InstallTest(unittest.TestCase):
- """Tests the configure guesser in spack create"""
-
- def setUp(self):
- self.tar = which('tar')
- self.tmpdir = tempfile.mkdtemp()
- self.orig_dir = os.getcwd()
- os.chdir(self.tmpdir)
- self.stage = None
-
-
- def tearDown(self):
- shutil.rmtree(self.tmpdir, ignore_errors=True)
- os.chdir(self.orig_dir)
-
-
- def check_archive(self, filename, system):
- mkdirp('archive')
- touch(join_path('archive', filename))
- self.tar('czf', 'archive.tar.gz', 'archive')
-
- url = 'file://' + join_path(os.getcwd(), 'archive.tar.gz')
- print url
- with Stage(url) as stage:
- stage.fetch()
-
- guesser = ConfigureGuesser()
- guesser(stage)
- self.assertEqual(system, guesser.build_system)
-
-
- def test_python(self):
- self.check_archive('setup.py', 'python')
-
-
- def test_autotools(self):
- self.check_archive('configure', 'autotools')
-
-
- def test_cmake(self):
- self.check_archive('CMakeLists.txt', 'cmake')
-
-
- def test_unknown(self):
- self.check_archive('foobar', 'unknown')
-
-
diff --git a/lib/spack/spack/test/conftest.py b/lib/spack/spack/test/conftest.py
new file mode 100644
index 0000000000..f344727674
--- /dev/null
+++ b/lib/spack/spack/test/conftest.py
@@ -0,0 +1,514 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import collections
+import copy
+import os
+import re
+import shutil
+
+import cStringIO
+import llnl.util.filesystem
+import llnl.util.lang
+import ordereddict_backport
+import py
+import pytest
+import spack
+import spack.architecture
+import spack.database
+import spack.directory_layout
+import spack.fetch_strategy
+import spack.platforms.test
+import spack.repository
+import spack.stage
+import spack.util.executable
+import spack.util.pattern
+
+
+##########
+# Monkey-patching that is applied to all tests
+##########
+
+
+@pytest.fixture(autouse=True)
+def no_stdin_duplication(monkeypatch):
+ """Duplicating stdin (or any other stream) returns an empty
+ cStringIO object.
+ """
+ monkeypatch.setattr(
+ llnl.util.lang,
+ 'duplicate_stream',
+ lambda x: cStringIO.StringIO()
+ )
+
+
+@pytest.fixture(autouse=True)
+def mock_fetch_cache(monkeypatch):
+ """Substitutes spack.fetch_cache with a mock object that does nothing
+ and raises on fetch.
+ """
+ class MockCache(object):
+ def store(self, copyCmd, relativeDst):
+ pass
+
+ def fetcher(self, targetPath, digest, **kwargs):
+ return MockCacheFetcher()
+
+ class MockCacheFetcher(object):
+ def set_stage(self, stage):
+ pass
+
+ def fetch(self):
+ raise spack.fetch_strategy.FetchError(
+ 'Mock cache always fails for tests'
+ )
+
+ def __str__(self):
+ return "[mock fetcher]"
+
+ monkeypatch.setattr(spack, 'fetch_cache', MockCache())
+
+
+# FIXME: The lines below would be better placed in a session-scoped fixture.
+# FIXME: Doing that is not easy, though, as there seems to be some weird
+# FIXME: interaction with compilers during concretization.
+spack.architecture.real_platform = spack.architecture.platform
+spack.architecture.platform = lambda: spack.platforms.test.Test()
+
+##########
+# Test-specific fixtures
+##########
+
+
+@pytest.fixture(scope='session')
+def repo_path():
+ """Session scoped RepoPath object pointing to the mock repository"""
+ return spack.repository.RepoPath(spack.mock_packages_path)
+
+
+@pytest.fixture(scope='module')
+def builtin_mock(repo_path):
+ """Uses the 'builtin.mock' repository instead of 'builtin'"""
+ mock_repo = copy.deepcopy(repo_path)
+ spack.repo.swap(mock_repo)
+ BuiltinMock = collections.namedtuple('BuiltinMock', ['real', 'mock'])
+ # Confusing, but we swapped above
+ yield BuiltinMock(real=mock_repo, mock=spack.repo)
+ spack.repo.swap(mock_repo)
+
+
+@pytest.fixture()
+def refresh_builtin_mock(builtin_mock, repo_path):
+ """Refreshes the state of spack.repo"""
+ # Get back the real repository
+ mock_repo = copy.deepcopy(repo_path)
+ spack.repo.swap(mock_repo)
+ return builtin_mock
+
+
+@pytest.fixture(scope='session')
+def linux_os():
+ """Returns a named tuple with attributes 'name' and 'version'
+ representing the OS.
+ """
+ platform = spack.architecture.platform()
+ name, version = 'debian', '6'
+ if platform.name == 'linux':
+ platform = spack.architecture.platform()
+ current_os = platform.operating_system('default_os')
+ name, version = current_os.name, current_os.version
+ LinuxOS = collections.namedtuple('LinuxOS', ['name', 'version'])
+ return LinuxOS(name=name, version=version)
+
+
+@pytest.fixture(scope='session')
+def configuration_dir(tmpdir_factory, linux_os):
+ """Copies mock configuration files in a temporary directory. Returns the
+ directory path.
+ """
+ tmpdir = tmpdir_factory.mktemp('configurations')
+ # Names of the yaml files in the test/data folder
+ test_path = py.path.local(spack.test_path)
+ compilers_yaml = test_path.join('data', 'compilers.yaml')
+ packages_yaml = test_path.join('data', 'packages.yaml')
+ config_yaml = test_path.join('data', 'config.yaml')
+ # Create temporary 'site' and 'user' folders
+ tmpdir.ensure('site', dir=True)
+ tmpdir.ensure('user', dir=True)
+ # Copy the configurations that don't need further work
+ packages_yaml.copy(tmpdir.join('site', 'packages.yaml'))
+ config_yaml.copy(tmpdir.join('site', 'config.yaml'))
+ # Write the one that needs modifications
+ content = ''.join(compilers_yaml.read()).format(linux_os)
+ t = tmpdir.join('site', 'compilers.yaml')
+ t.write(content)
+ return tmpdir
+
+
+@pytest.fixture(scope='module')
+def config(configuration_dir):
+ """Hooks the mock configuration files into spack.config"""
+ # Set up a mock config scope
+ spack.config.clear_config_caches()
+ real_scope = spack.config.config_scopes
+ spack.config.config_scopes = ordereddict_backport.OrderedDict()
+ spack.config.ConfigScope('site', str(configuration_dir.join('site')))
+ spack.config.ConfigScope('user', str(configuration_dir.join('user')))
+ Config = collections.namedtuple('Config', ['real', 'mock'])
+ yield Config(real=real_scope, mock=spack.config.config_scopes)
+ spack.config.config_scopes = real_scope
+ spack.config.clear_config_caches()
+
+
+@pytest.fixture(scope='module')
+def database(tmpdir_factory, builtin_mock, config):
+ """Creates a mock database with some packages installed note that
+ the ref count for dyninst here will be 3, as it's recycled
+ across each install.
+ """
+
+ # Here is what the mock DB looks like:
+ #
+ # o mpileaks o mpileaks' o mpileaks''
+ # |\ |\ |\
+ # | o callpath | o callpath' | o callpath''
+ # |/| |/| |/|
+ # o | mpich o | mpich2 o | zmpi
+ # | | o | fake
+ # | | |
+ # | |______________/
+ # | .____________/
+ # |/
+ # o dyninst
+ # |\
+ # | o libdwarf
+ # |/
+ # o libelf
+
+ # Make a fake install directory
+ install_path = tmpdir_factory.mktemp('install_for_database')
+ spack_install_path = py.path.local(spack.store.root)
+ spack.store.root = str(install_path)
+
+ install_layout = spack.directory_layout.YamlDirectoryLayout(
+ str(install_path)
+ )
+ spack_install_layout = spack.store.layout
+ spack.store.layout = install_layout
+
+ # Make fake database and fake install directory.
+ install_db = spack.database.Database(str(install_path))
+ spack_install_db = spack.store.db
+ spack.store.db = install_db
+
+ Entry = collections.namedtuple('Entry', ['path', 'layout', 'db'])
+ Database = collections.namedtuple(
+ 'Database', ['real', 'mock', 'install', 'uninstall', 'refresh']
+ )
+
+ real = Entry(
+ path=spack_install_path,
+ layout=spack_install_layout,
+ db=spack_install_db
+ )
+ mock = Entry(path=install_path, layout=install_layout, db=install_db)
+
+ def _install(spec):
+ s = spack.spec.Spec(spec)
+ s.concretize()
+ pkg = spack.repo.get(s)
+ pkg.do_install(fake=True)
+
+ def _uninstall(spec):
+ spec.package.do_uninstall(spec)
+
+ def _refresh():
+ with spack.store.db.write_transaction():
+ for spec in spack.store.db.query():
+ _uninstall(spec)
+ _install('mpileaks ^mpich')
+ _install('mpileaks ^mpich2')
+ _install('mpileaks ^zmpi')
+
+ t = Database(
+ real=real,
+ mock=mock,
+ install=_install,
+ uninstall=_uninstall,
+ refresh=_refresh
+ )
+ # Transaction used to avoid repeated writes.
+ with spack.store.db.write_transaction():
+ t.install('mpileaks ^mpich')
+ t.install('mpileaks ^mpich2')
+ t.install('mpileaks ^zmpi')
+
+ yield t
+
+ with spack.store.db.write_transaction():
+ for spec in spack.store.db.query():
+ t.uninstall(spec)
+
+ install_path.remove(rec=1)
+ spack.store.root = str(spack_install_path)
+ spack.store.layout = spack_install_layout
+ spack.store.db = spack_install_db
+
+
+@pytest.fixture()
+def refresh_db_on_exit(database):
+ """"Restores the state of the database after a test."""
+ yield
+ database.refresh()
+
+##########
+# Fake archives and repositories
+##########
+
+
+@pytest.fixture(scope='session')
+def mock_archive():
+ """Creates a very simple archive directory with a configure script and a
+ makefile that installs to a prefix. Tars it up into an archive.
+ """
+ tar = spack.util.executable.which('tar', required=True)
+ stage = spack.stage.Stage('mock-archive-stage')
+ tmpdir = py.path.local(stage.path)
+ repo_name = 'mock-archive-repo'
+ tmpdir.ensure(repo_name, dir=True)
+ repodir = tmpdir.join(repo_name)
+ # Create the configure script
+ configure_path = str(tmpdir.join(repo_name, 'configure'))
+ with open(configure_path, 'w') as f:
+ f.write(
+ "#!/bin/sh\n"
+ "prefix=$(echo $1 | sed 's/--prefix=//')\n"
+ "cat > Makefile <<EOF\n"
+ "all:\n"
+ "\techo Building...\n\n"
+ "install:\n"
+ "\tmkdir -p $prefix\n"
+ "\ttouch $prefix/dummy_file\n"
+ "EOF\n"
+ )
+ os.chmod(configure_path, 0755)
+ # Archive it
+ current = tmpdir.chdir()
+ archive_name = '{0}.tar.gz'.format(repo_name)
+ tar('-czf', archive_name, repo_name)
+ current.chdir()
+ Archive = collections.namedtuple('Archive', ['url', 'path'])
+ url = 'file://' + str(tmpdir.join(archive_name))
+ # Yield the archive URL and path
+ yield Archive(url=url, path=str(repodir))
+ stage.destroy()
+
+
+@pytest.fixture(scope='session')
+def mock_git_repository():
+ """Creates a very simple git repository with two branches and
+ two commits.
+ """
+ git = spack.util.executable.which('git', required=True)
+ stage = spack.stage.Stage('mock-git-stage')
+ tmpdir = py.path.local(stage.path)
+ repo_name = 'mock-git-repo'
+ tmpdir.ensure(repo_name, dir=True)
+ repodir = tmpdir.join(repo_name)
+
+ # Initialize the repository
+ current = repodir.chdir()
+ git('init')
+ url = 'file://' + str(repodir)
+
+ # r0 is just the first commit
+ r0_file = 'r0_file'
+ repodir.ensure(r0_file)
+ git('add', r0_file)
+ git('commit', '-m', 'mock-git-repo r0')
+
+ branch = 'test-branch'
+ branch_file = 'branch_file'
+ git('branch', branch)
+
+ tag_branch = 'tag-branch'
+ tag_file = 'tag_file'
+ git('branch', tag_branch)
+
+ # Check out first branch
+ git('checkout', branch)
+ repodir.ensure(branch_file)
+ git('add', branch_file)
+ git('commit', '-m', 'r1 test branch')
+
+ # Check out a second branch and tag it
+ git('checkout', tag_branch)
+ repodir.ensure(tag_file)
+ git('add', tag_file)
+ git('commit', '-m', 'tag test branch')
+
+ tag = 'test-tag'
+ git('tag', tag)
+
+ git('checkout', 'master')
+
+ # The commit check (r1) uses the same revision and file as the branch check
+ rev_hash = lambda x: git('rev-parse', x, output=str).strip()
+ r1 = rev_hash(branch)
+ r1_file = branch_file
+ current.chdir()
+
+ Bunch = spack.util.pattern.Bunch
+
+ checks = {
+ 'master': Bunch(
+ revision='master', file=r0_file, args={'git': str(repodir)}
+ ),
+ 'branch': Bunch(
+ revision=branch, file=branch_file, args={
+ 'git': str(repodir), 'branch': branch
+ }
+ ),
+ 'tag': Bunch(
+ revision=tag, file=tag_file, args={'git': str(repodir), 'tag': tag}
+ ),
+ 'commit': Bunch(
+ revision=r1, file=r1_file, args={'git': str(repodir), 'commit': r1}
+ )
+ }
+
+ t = Bunch(checks=checks, url=url, hash=rev_hash, path=str(repodir))
+ yield t
+ stage.destroy()
+
+
+@pytest.fixture(scope='session')
+def mock_hg_repository():
+ """Creates a very simple hg repository with two commits."""
+ hg = spack.util.executable.which('hg', required=True)
+ stage = spack.stage.Stage('mock-hg-stage')
+ tmpdir = py.path.local(stage.path)
+ repo_name = 'mock-hg-repo'
+ tmpdir.ensure(repo_name, dir=True)
+ repodir = tmpdir.join(repo_name)
+
+ get_rev = lambda: hg('id', '-i', output=str).strip()
+
+ # Initialize the repository
+ current = repodir.chdir()
+ url = 'file://' + str(repodir)
+ hg('init')
+ # Commit file r0
+ r0_file = 'r0_file'
+ repodir.ensure(r0_file)
+ hg('add', r0_file)
+ hg('commit', '-m', 'revision 0', '-u', 'test')
+ r0 = get_rev()
+ # Commit file r1
+ r1_file = 'r1_file'
+ repodir.ensure(r1_file)
+ hg('add', r1_file)
+ hg('commit', '-m', 'revision 1', '-u', 'test')
+ r1 = get_rev()
+ current.chdir()
+
+ Bunch = spack.util.pattern.Bunch
+
+ checks = {
+ 'default': Bunch(
+ revision=r1, file=r1_file, args={'hg': str(repodir)}
+ ),
+ 'rev0': Bunch(
+ revision=r0, file=r0_file, args={
+ 'hg': str(repodir), 'revision': r0
+ }
+ )
+ }
+ t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
+ yield t
+ stage.destroy()
+
+
+@pytest.fixture(scope='session')
+def mock_svn_repository():
+ """Creates a very simple svn repository with two commits."""
+ svn = spack.util.executable.which('svn', required=True)
+ svnadmin = spack.util.executable.which('svnadmin', required=True)
+ stage = spack.stage.Stage('mock-svn-stage')
+ tmpdir = py.path.local(stage.path)
+ repo_name = 'mock-svn-repo'
+ tmpdir.ensure(repo_name, dir=True)
+ repodir = tmpdir.join(repo_name)
+ url = 'file://' + str(repodir)
+ # Initialize the repository
+ current = repodir.chdir()
+ svnadmin('create', str(repodir))
+
+ # Import a structure (first commit)
+ r0_file = 'r0_file'
+ tmpdir.ensure('tmp-path', r0_file)
+ svn(
+ 'import',
+ str(tmpdir.join('tmp-path')),
+ url,
+ '-m',
+ 'Initial import r0'
+ )
+ shutil.rmtree(str(tmpdir.join('tmp-path')))
+ # Second commit
+ r1_file = 'r1_file'
+ svn('checkout', url, str(tmpdir.join('tmp-path')))
+ tmpdir.ensure('tmp-path', r1_file)
+ tmpdir.join('tmp-path').chdir()
+ svn('add', str(tmpdir.ensure('tmp-path', r1_file)))
+ svn('ci', '-m', 'second revision r1')
+ repodir.chdir()
+ shutil.rmtree(str(tmpdir.join('tmp-path')))
+ r0 = '1'
+ r1 = '2'
+
+ Bunch = spack.util.pattern.Bunch
+
+ checks = {
+ 'default': Bunch(
+ revision=r1, file=r1_file, args={'svn': url}
+ ),
+ 'rev0': Bunch(
+ revision=r0, file=r0_file, args={
+ 'svn': url, 'revision': r0
+ }
+ )
+ }
+
+ def get_rev():
+ output = svn('info', output=str)
+ assert "Revision" in output
+ for line in output.split('\n'):
+ match = re.match(r'Revision: (\d+)', line)
+ if match:
+ return match.group(1)
+
+ t = Bunch(checks=checks, url=url, hash=get_rev, path=str(repodir))
+ yield t
+ current.chdir()
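Most fixtures in this new conftest.py follow the same yield pattern: build the resource, hand it to the test, and undo the setup after the yield. A minimal generic sketch (hypothetical fixture, not part of this change):

    import pytest

    @pytest.fixture()
    def scratch_dir(tmpdir):
        work = tmpdir.mkdir('scratch')  # setup runs before the test body
        yield work                      # the yielded value is injected into the test
        work.remove(rec=1)              # teardown runs after the test, pass or fail
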
diff --git a/lib/spack/spack/test/data/compilers.yaml b/lib/spack/spack/test/data/compilers.yaml
new file mode 100644
index 0000000000..ebba6a601d
--- /dev/null
+++ b/lib/spack/spack/test/data/compilers.yaml
@@ -0,0 +1,116 @@
+compilers:
+- compiler:
+ spec: clang@3.3
+ operating_system: {0.name}{0.version}
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: gcc@4.5.0
+ operating_system: {0.name}{0.version}
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: CNL
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: SuSE11
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: yosemite
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: CNL
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: SuSE11
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: yosemite
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ paths:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ operating_system: elcapitan
+ spec: gcc@4.5.0
+ modules: 'None'
+- compiler:
+ spec: clang@3.3
+ operating_system: elcapitan
+ paths:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: 'None'
+- compiler:
+ spec: gcc@4.7.2
+ operating_system: redhat6
+ paths:
+ cc: /path/to/gcc472
+ cxx: /path/to/g++472
+ f77: /path/to/gfortran472
+ fc: /path/to/gfortran472
+ flags:
+ cflags: -O0
+ cxxflags: -O0
+ fflags: -O0
+ modules: 'None'
+- compiler:
+ spec: clang@3.5
+ operating_system: redhat6
+ paths:
+ cc: /path/to/clang35
+ cxx: /path/to/clang++35
+ f77: None
+ fc: None
+ flags:
+ cflags: -O3
+ cxxflags: -O3
+ modules: 'None'
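The {0.name}{0.version} placeholders in the file above are filled in by the configuration_dir fixture in conftest.py, which formats the file's text with the linux_os named tuple. A small sketch of that substitution, using the fixture's debian/6 fallback values:

    import collections

    # Mirrors how configuration_dir renders compilers.yaml before copying it.
    LinuxOS = collections.namedtuple('LinuxOS', ['name', 'version'])
    template = "operating_system: {0.name}{0.version}"
    print(template.format(LinuxOS(name='debian', version='6')))  # -> operating_system: debian6
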
diff --git a/lib/spack/spack/test/data/config.yaml b/lib/spack/spack/test/data/config.yaml
new file mode 100644
index 0000000000..d1758e9c16
--- /dev/null
+++ b/lib/spack/spack/test/data/config.yaml
@@ -0,0 +1,11 @@
+config:
+ install_tree: $spack/opt/spack
+ build_stage:
+ - $tempdir
+ - /nfs/tmp2/$user
+ - $spack/var/spack/stage
+ source_cache: $spack/var/spack/cache
+ misc_cache: ~/.spack/cache
+ verify_ssl: true
+ checksum: true
+ dirty: True
diff --git a/lib/spack/spack/test/data/packages.yaml b/lib/spack/spack/test/data/packages.yaml
new file mode 100644
index 0000000000..923d63173a
--- /dev/null
+++ b/lib/spack/spack/test/data/packages.yaml
@@ -0,0 +1,14 @@
+packages:
+ externaltool:
+ buildable: False
+ paths:
+ externaltool@1.0%gcc@4.5.0: /path/to/external_tool
+ externalvirtual:
+ buildable: False
+ paths:
+ externalvirtual@2.0%clang@3.3: /path/to/external_virtual_clang
+ externalvirtual@1.0%gcc@4.5.0: /path/to/external_virtual_gcc
+ externalmodule:
+ buildable: False
+ modules:
+ externalmodule@1.0%gcc@4.5.0: external-module
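These mock packages.yaml entries back the external-package tests near the top of this diff: externaltool and externalvirtual resolve to the configured paths, and externalmodule resolves to the configured module name, as in the test repeated here:

    from spack.spec import Spec

    # Mirrors test_external_package_module above (skipped on darwin/linux,
    # where no tcl modules are available).
    spec = Spec('externalmodule')
    spec.concretize()
    assert spec['externalmodule'].external_module == 'external-module'
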
diff --git a/lib/spack/spack/hooks/dotkit.py b/lib/spack/spack/test/data/sourceme_first.sh
index a140646e04..ee21fabbd5 100644
--- a/lib/spack/spack/hooks/dotkit.py
+++ b/lib/spack/spack/test/data/sourceme_first.sh
@@ -1,3 +1,5 @@
+#!/usr/bin/env bash
+
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -22,14 +24,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import spack.modules
-
-
-def post_install(pkg):
- dk = spack.modules.Dotkit(pkg.spec)
- dk.write()
-
-def post_uninstall(pkg):
- dk = spack.modules.Dotkit(pkg.spec)
- dk.remove()
+export NEW_VAR='new'
+export UNSET_ME='overridden'
diff --git a/lib/spack/spack/hooks/tclmodule.py b/lib/spack/spack/test/data/sourceme_parameters.sh
index 2c88810c97..2ee0cc87bd 100644
--- a/lib/spack/spack/hooks/tclmodule.py
+++ b/lib/spack/spack/test/data/sourceme_parameters.sh
@@ -1,3 +1,5 @@
+#!/usr/bin/env bash
+
##############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
@@ -22,14 +24,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import spack.modules
-
-
-def post_install(pkg):
- dk = spack.modules.TclModule(pkg.spec)
- dk.write()
-
-def post_uninstall(pkg):
- dk = spack.modules.TclModule(pkg.spec)
- dk.remove()
+if [[ "$1" == "intel64" ]] ; then
+ export FOO='intel64'
+else
+ export FOO='default'
+fi
diff --git a/lib/spack/spack/test/data/sourceme_second.sh b/lib/spack/spack/test/data/sourceme_second.sh
new file mode 100644
index 0000000000..2269225e45
--- /dev/null
+++ b/lib/spack/spack/test/data/sourceme_second.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+export PATH_LIST='/path/first:/path/second:/path/fourth'
+unset EMPTY_PATH_LIST
diff --git a/lib/spack/spack/test/database.py b/lib/spack/spack/test/database.py
index e1322f2081..bbaa88b91d 100644
--- a/lib/spack/spack/test/database.py
+++ b/lib/spack/spack/test/database.py
@@ -26,14 +26,13 @@
These tests check the database is functioning properly,
both in memory and in its file
"""
-import os.path
import multiprocessing
+import os.path
+import pytest
import spack
-from llnl.util.filesystem import join_path
-from llnl.util.lock import *
+import spack.store
from llnl.util.tty.colify import colify
-from spack.test.mock_database import MockDatabase
def _print_ref_counts():
@@ -41,16 +40,16 @@ def _print_ref_counts():
recs = []
def add_rec(spec):
- cspecs = spack.installed_db.query(spec, installed=any)
+ cspecs = spack.store.db.query(spec, installed=any)
if not cspecs:
recs.append("[ %-7s ] %-20s-" % ('', spec))
else:
key = cspecs[0].dag_hash()
- rec = spack.installed_db.get_record(cspecs[0])
+ rec = spack.store.db.get_record(cspecs[0])
recs.append("[ %-7s ] %-20s%d" % (key[:7], spec, rec.ref_count))
- with spack.installed_db.read_transaction():
+ with spack.store.db.read_transaction():
add_rec('mpileaks ^mpich')
add_rec('callpath ^mpich')
add_rec('mpich')
@@ -71,205 +70,286 @@ def _print_ref_counts():
colify(recs, cols=3)
-class DatabaseTest(MockDatabase):
- def test_005_db_exists(self):
- """Make sure db cache file exists after creating."""
- index_file = join_path(self.install_path, '.spack-db', 'index.yaml')
- lock_file = join_path(self.install_path, '.spack-db', 'lock')
+def _check_merkleiness():
+ """Ensure the spack database is a valid merkle graph."""
+ all_specs = spack.store.db.query(installed=any)
+
+ seen = {}
+ for spec in all_specs:
+ for dep in spec.dependencies():
+ hash_key = dep.dag_hash()
+ if hash_key not in seen:
+ seen[hash_key] = id(dep)
+ else:
+ assert seen[hash_key] == id(dep)
+
+
+def _check_db_sanity(install_db):
+ """Utiilty function to check db against install layout."""
+ expected = sorted(spack.store.layout.all_specs())
+ actual = sorted(install_db.query())
+
+ assert len(expected) == len(actual)
+ for e, a in zip(expected, actual):
+ assert e == a
+
+ _check_merkleiness()
+
+
+def _mock_remove(spec):
+ specs = spack.store.db.query(spec)
+ assert len(specs) == 1
+ spec = specs[0]
+ spec.package.do_uninstall(spec)
+
+
+def test_005_db_exists(database):
+ """Make sure db cache file exists after creating."""
+ install_path = database.mock.path
+ index_file = install_path.join('.spack-db', 'index.json')
+ lock_file = install_path.join('.spack-db', 'lock')
+ assert os.path.exists(str(index_file))
+ assert os.path.exists(str(lock_file))
+
+
+def test_010_all_install_sanity(database):
+ """Ensure that the install layout reflects what we think it does."""
+ all_specs = spack.store.layout.all_specs()
+ assert len(all_specs) == 13
+
+ # Query specs with multiple configurations
+ mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
+ callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
+ mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+
+ assert len(mpileaks_specs) == 3
+ assert len(callpath_specs) == 3
+ assert len(mpi_specs) == 3
+
+ # Query specs with single configurations
+ dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
+ libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')]
+ libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
+
+ assert len(dyninst_specs) == 1
+ assert len(libdwarf_specs) == 1
+ assert len(libelf_specs) == 1
+
+ # Query by dependency
+ assert len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]) == 1
+ assert len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]) == 1
+ assert len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]) == 1
+
+
+def test_015_write_and_read(database):
+ # write and read DB
+ with spack.store.db.write_transaction():
+ specs = spack.store.db.query()
+ recs = [spack.store.db.get_record(s) for s in specs]
+
+ for spec, rec in zip(specs, recs):
+ new_rec = spack.store.db.get_record(spec)
+ assert new_rec.ref_count == rec.ref_count
+ assert new_rec.spec == rec.spec
+ assert new_rec.path == rec.path
+ assert new_rec.installed == rec.installed
+
+
+def test_020_db_sanity(database):
+ """Make sure query() returns what's actually in the db."""
+ install_db = database.mock.db
+ _check_db_sanity(install_db)
+
- self.assertTrue(os.path.exists(index_file))
- self.assertTrue(os.path.exists(lock_file))
+def test_025_reindex(database):
+ """Make sure reindex works and ref counts are valid."""
+ install_db = database.mock.db
+ spack.store.db.reindex(spack.store.layout)
+ _check_db_sanity(install_db)
- def test_010_all_install_sanity(self):
- """Ensure that the install layout reflects what we think it does."""
- all_specs = spack.install_layout.all_specs()
- self.assertEqual(len(all_specs), 13)
- # query specs with multiple configurations
- mpileaks_specs = [s for s in all_specs if s.satisfies('mpileaks')]
- callpath_specs = [s for s in all_specs if s.satisfies('callpath')]
- mpi_specs = [s for s in all_specs if s.satisfies('mpi')]
+def test_030_db_sanity_from_another_process(database, refresh_db_on_exit):
+ install_db = database.mock.db
- self.assertEqual(len(mpileaks_specs), 3)
- self.assertEqual(len(callpath_specs), 3)
- self.assertEqual(len(mpi_specs), 3)
+ def read_and_modify():
+ _check_db_sanity(install_db) # check that other process can read DB
+ with install_db.write_transaction():
+ _mock_remove('mpileaks ^zmpi')
- # query specs with single configurations
- dyninst_specs = [s for s in all_specs if s.satisfies('dyninst')]
- libdwarf_specs = [s for s in all_specs if s.satisfies('libdwarf')]
- libelf_specs = [s for s in all_specs if s.satisfies('libelf')]
+ p = multiprocessing.Process(target=read_and_modify, args=())
+ p.start()
+ p.join()
- self.assertEqual(len(dyninst_specs), 1)
- self.assertEqual(len(libdwarf_specs), 1)
- self.assertEqual(len(libelf_specs), 1)
+ # ensure child process change is visible in parent process
+ with install_db.read_transaction():
+ assert len(install_db.query('mpileaks ^zmpi')) == 0
- # Query by dependency
- self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich')]), 1)
- self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^mpich2')]), 1)
- self.assertEqual(len([s for s in all_specs if s.satisfies('mpileaks ^zmpi')]), 1)
+def test_040_ref_counts(database):
+ """Ensure that we got ref counts right when we read the DB."""
+ install_db = database.mock.db
+ install_db._check_ref_counts()
- def test_015_write_and_read(self):
- # write and read DB
- with spack.installed_db.write_transaction():
- specs = spack.installed_db.query()
- recs = [spack.installed_db.get_record(s) for s in specs]
- for spec, rec in zip(specs, recs):
- new_rec = spack.installed_db.get_record(spec)
- self.assertEqual(new_rec.ref_count, rec.ref_count)
- self.assertEqual(new_rec.spec, rec.spec)
- self.assertEqual(new_rec.path, rec.path)
- self.assertEqual(new_rec.installed, rec.installed)
+def test_050_basic_query(database):
+ """Ensure querying database is consistent with what is installed."""
+ install_db = database.mock.db
+ # query everything
+ assert len(spack.store.db.query()) == 13
+ # query specs with multiple configurations
+ mpileaks_specs = install_db.query('mpileaks')
+ callpath_specs = install_db.query('callpath')
+ mpi_specs = install_db.query('mpi')
- def _check_db_sanity(self):
- """Utiilty function to check db against install layout."""
- expected = sorted(spack.install_layout.all_specs())
- actual = sorted(self.installed_db.query())
+ assert len(mpileaks_specs) == 3
+ assert len(callpath_specs) == 3
+ assert len(mpi_specs) == 3
- self.assertEqual(len(expected), len(actual))
- for e, a in zip(expected, actual):
- self.assertEqual(e, a)
+ # query specs with single configurations
+ dyninst_specs = install_db.query('dyninst')
+ libdwarf_specs = install_db.query('libdwarf')
+ libelf_specs = install_db.query('libelf')
+ assert len(dyninst_specs) == 1
+ assert len(libdwarf_specs) == 1
+ assert len(libelf_specs) == 1
- def test_020_db_sanity(self):
- """Make sure query() returns what's actually in the db."""
- self._check_db_sanity()
+ # Query by dependency
+ assert len(install_db.query('mpileaks ^mpich')) == 1
+ assert len(install_db.query('mpileaks ^mpich2')) == 1
+ assert len(install_db.query('mpileaks ^zmpi')) == 1
- def test_030_db_sanity_from_another_process(self):
- def read_and_modify():
- self._check_db_sanity() # check that other process can read DB
- with self.installed_db.write_transaction():
- self._mock_remove('mpileaks ^zmpi')
+def _check_remove_and_add_package(install_db, spec):
+ """Remove a spec from the DB, then add it and make sure everything's
+ still ok once it is added. This checks that it was
+ removed, that it's back when added again, and that ref
+ counts are consistent.
+ """
+ original = install_db.query()
+ install_db._check_ref_counts()
- p = multiprocessing.Process(target=read_and_modify, args=())
- p.start()
- p.join()
+ # Remove spec
+ concrete_spec = install_db.remove(spec)
+ install_db._check_ref_counts()
+ remaining = install_db.query()
- # ensure child process change is visible in parent process
- with self.installed_db.read_transaction():
- self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 0)
+ # ensure spec we removed is gone
+ assert len(original) - 1 == len(remaining)
+ assert all(s in original for s in remaining)
+ assert concrete_spec not in remaining
+ # add it back and make sure everything is ok.
+ install_db.add(concrete_spec, spack.store.layout)
+ installed = install_db.query()
+ assert concrete_spec in installed
+ assert installed == original
- def test_040_ref_counts(self):
- """Ensure that we got ref counts right when we read the DB."""
- self.installed_db._check_ref_counts()
+ # sanity check against directory layout and check ref counts.
+ _check_db_sanity(install_db)
+ install_db._check_ref_counts()
- def test_050_basic_query(self):
- """Ensure that querying the database is consistent with what is installed."""
- # query everything
- self.assertEqual(len(spack.installed_db.query()), 13)
+def test_060_remove_and_add_root_package(database):
+ install_db = database.mock.db
+ _check_remove_and_add_package(install_db, 'mpileaks ^mpich')
- # query specs with multiple configurations
- mpileaks_specs = self.installed_db.query('mpileaks')
- callpath_specs = self.installed_db.query('callpath')
- mpi_specs = self.installed_db.query('mpi')
- self.assertEqual(len(mpileaks_specs), 3)
- self.assertEqual(len(callpath_specs), 3)
- self.assertEqual(len(mpi_specs), 3)
+def test_070_remove_and_add_dependency_package(database):
+ install_db = database.mock.db
+ _check_remove_and_add_package(install_db, 'dyninst')
- # query specs with single configurations
- dyninst_specs = self.installed_db.query('dyninst')
- libdwarf_specs = self.installed_db.query('libdwarf')
- libelf_specs = self.installed_db.query('libelf')
- self.assertEqual(len(dyninst_specs), 1)
- self.assertEqual(len(libdwarf_specs), 1)
- self.assertEqual(len(libelf_specs), 1)
+def test_080_root_ref_counts(database):
+ install_db = database.mock.db
+ rec = install_db.get_record('mpileaks ^mpich')
- # Query by dependency
- self.assertEqual(len(self.installed_db.query('mpileaks ^mpich')), 1)
- self.assertEqual(len(self.installed_db.query('mpileaks ^mpich2')), 1)
- self.assertEqual(len(self.installed_db.query('mpileaks ^zmpi')), 1)
+ # Remove a top-level spec from the DB
+ install_db.remove('mpileaks ^mpich')
+ # record no longer in DB
+ assert install_db.query('mpileaks ^mpich', installed=any) == []
- def _check_remove_and_add_package(self, spec):
- """Remove a spec from the DB, then add it and make sure everything's
- still ok once it is added. This checks that it was
- removed, that it's back when added again, and that ref
- counts are consistent.
- """
- original = self.installed_db.query()
- self.installed_db._check_ref_counts()
+ # record's deps have updated ref_counts
+ assert install_db.get_record('callpath ^mpich').ref_count == 0
+ assert install_db.get_record('mpich').ref_count == 1
- # Remove spec
- concrete_spec = self.installed_db.remove(spec)
- self.installed_db._check_ref_counts()
- remaining = self.installed_db.query()
+ # Put the spec back
+ install_db.add(rec.spec, spack.store.layout)
- # ensure spec we removed is gone
- self.assertEqual(len(original) - 1, len(remaining))
- self.assertTrue(all(s in original for s in remaining))
- self.assertTrue(concrete_spec not in remaining)
+ # record is present again
+ assert len(install_db.query('mpileaks ^mpich', installed=any)) == 1
- # add it back and make sure everything is ok.
- self.installed_db.add(concrete_spec, "")
- installed = self.installed_db.query()
- self.assertEqual(len(installed), len(original))
+ # dependencies have ref counts updated
+ assert install_db.get_record('callpath ^mpich').ref_count == 1
+ assert install_db.get_record('mpich').ref_count == 2
- # sanity check against direcory layout and check ref counts.
- self._check_db_sanity()
- self.installed_db._check_ref_counts()
+def test_090_non_root_ref_counts(database):
+ install_db = database.mock.db
- def test_060_remove_and_add_root_package(self):
- self._check_remove_and_add_package('mpileaks ^mpich')
+ install_db.get_record('mpileaks ^mpich')
+ install_db.get_record('callpath ^mpich')
+ # "force remove" a non-root spec from the DB
+ install_db.remove('callpath ^mpich')
- def test_070_remove_and_add_dependency_package(self):
- self._check_remove_and_add_package('dyninst')
+ # record still in DB but marked uninstalled
+ assert install_db.query('callpath ^mpich', installed=True) == []
+ assert len(install_db.query('callpath ^mpich', installed=any)) == 1
+ # record and its deps have same ref_counts
+ assert install_db.get_record(
+ 'callpath ^mpich', installed=any
+ ).ref_count == 1
+ assert install_db.get_record('mpich').ref_count == 2
- def test_080_root_ref_counts(self):
- rec = self.installed_db.get_record('mpileaks ^mpich')
+ # remove only dependent of uninstalled callpath record
+ install_db.remove('mpileaks ^mpich')
- # Remove a top-level spec from the DB
- self.installed_db.remove('mpileaks ^mpich')
+ # record and parent are completely gone.
+ assert install_db.query('mpileaks ^mpich', installed=any) == []
+ assert install_db.query('callpath ^mpich', installed=any) == []
- # record no longer in DB
- self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), [])
+ # mpich ref count updated properly.
+ mpich_rec = install_db.get_record('mpich')
+ assert mpich_rec.ref_count == 0
- # record's deps have updated ref_counts
- self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 0)
- self.assertEqual(self.installed_db.get_record('mpich').ref_count, 1)
- # put the spec back
- self.installed_db.add(rec.spec, rec.path)
+def test_100_no_write_with_exception_on_remove(database):
+ install_db = database.mock.db
- # record is present again
- self.assertEqual(len(self.installed_db.query('mpileaks ^mpich', installed=any)), 1)
+ def fail_while_writing():
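+ # raising inside the write transaction should abort it before the
+ # database file is rewritten on disk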
+ with install_db.write_transaction():
+ _mock_remove('mpileaks ^zmpi')
+ raise Exception()
- # dependencies have ref counts updated
- self.assertEqual(self.installed_db.get_record('callpath ^mpich').ref_count, 1)
- self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2)
+ with install_db.read_transaction():
+ assert len(install_db.query('mpileaks ^zmpi', installed=any)) == 1
+ with pytest.raises(Exception):
+ fail_while_writing()
- def test_090_non_root_ref_counts(self):
- mpileaks_mpich_rec = self.installed_db.get_record('mpileaks ^mpich')
- callpath_mpich_rec = self.installed_db.get_record('callpath ^mpich')
+ # reload DB and make sure zmpi is still there.
+ with install_db.read_transaction():
+ assert len(install_db.query('mpileaks ^zmpi', installed=any)) == 1
- # "force remove" a non-root spec from the DB
- self.installed_db.remove('callpath ^mpich')
- # record still in DB but marked uninstalled
- self.assertEqual(self.installed_db.query('callpath ^mpich', installed=True), [])
- self.assertEqual(len(self.installed_db.query('callpath ^mpich', installed=any)), 1)
+def test_110_no_write_with_exception_on_install(database):
+ install_db = database.mock.db
- # record and its deps have same ref_counts
- self.assertEqual(self.installed_db.get_record('callpath ^mpich', installed=any).ref_count, 1)
- self.assertEqual(self.installed_db.get_record('mpich').ref_count, 2)
+ def fail_while_writing():
+ with install_db.write_transaction():
+ _mock_install('cmake')
+ raise Exception()
- # remove only dependent of uninstalled callpath record
- self.installed_db.remove('mpileaks ^mpich')
+ with install_db.read_transaction():
+ assert install_db.query('cmake', installed=any) == []
- # record and parent are completely gone.
- self.assertEqual(self.installed_db.query('mpileaks ^mpich', installed=any), [])
- self.assertEqual(self.installed_db.query('callpath ^mpich', installed=any), [])
+ with pytest.raises(Exception):
+ fail_while_writing()
- # mpich ref count updated properly.
- mpich_rec = self.installed_db.get_record('mpich')
- self.assertEqual(mpich_rec.ref_count, 0)
+ # reload DB and make sure cmake was not written.
+ with install_db.read_transaction():
+ assert install_db.query('cmake', installed=any) == []
diff --git a/lib/spack/spack/test/directory_layout.py b/lib/spack/spack/test/directory_layout.py
index 74669fe8a2..2caadad0fe 100644
--- a/lib/spack/spack/test/directory_layout.py
+++ b/lib/spack/spack/test/directory_layout.py
@@ -22,167 +22,173 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""\
+"""
This test verifies that the Spack directory layout works properly.
"""
import os
-import shutil
-import tempfile
+import pytest
import spack
-from llnl.util.filesystem import *
+from llnl.util.filesystem import join_path
from spack.directory_layout import YamlDirectoryLayout
from spack.repository import RepoPath
from spack.spec import Spec
-from spack.test.mock_packages_test import *
# number of packages to test (to reduce test time)
max_packages = 10
-class DirectoryLayoutTest(MockPackagesTest):
- """Tests that a directory layout works correctly and produces a
- consistent install path."""
-
- def setUp(self):
- super(DirectoryLayoutTest, self).setUp()
- self.tmpdir = tempfile.mkdtemp()
- self.layout = YamlDirectoryLayout(self.tmpdir)
-
-
- def tearDown(self):
- super(DirectoryLayoutTest, self).tearDown()
- shutil.rmtree(self.tmpdir, ignore_errors=True)
- self.layout = None
-
-
- def test_read_and_write_spec(self):
- """This goes through each package in spack and creates a directory for
- it. It then ensures that the spec for the directory's
- installed package can be read back in consistently, and
- finally that the directory can be removed by the directory
- layout.
- """
- packages = list(spack.repo.all_packages())[:max_packages]
-
- for pkg in packages:
- if pkg.name.startswith('external'):
- #External package tests cannot be installed
- continue
- spec = pkg.spec
-
- # If a spec fails to concretize, just skip it. If it is a
- # real error, it will be caught by concretization tests.
- try:
- spec.concretize()
- except:
- continue
-
- self.layout.create_install_directory(spec)
-
- install_dir = self.layout.path_for_spec(spec)
- spec_path = self.layout.spec_file_path(spec)
-
- # Ensure directory has been created in right place.
- self.assertTrue(os.path.isdir(install_dir))
- self.assertTrue(install_dir.startswith(self.tmpdir))
-
- # Ensure spec file exists when directory is created
- self.assertTrue(os.path.isfile(spec_path))
- self.assertTrue(spec_path.startswith(install_dir))
-
- # Make sure spec file can be read back in to get the original spec
- spec_from_file = self.layout.read_spec(spec_path)
- self.assertEqual(spec, spec_from_file)
- self.assertTrue(spec.eq_dag, spec_from_file)
- self.assertTrue(spec_from_file.concrete)
-
- # Ensure that specs that come out "normal" are really normal.
- with open(spec_path) as spec_file:
- read_separately = Spec.from_yaml(spec_file.read())
-
- read_separately.normalize()
- self.assertEqual(read_separately, spec_from_file)
-
- read_separately.concretize()
- self.assertEqual(read_separately, spec_from_file)
-
- # Make sure the hash of the read-in spec is the same
- self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
-
- # Ensure directories are properly removed
- self.layout.remove_install_directory(spec)
- self.assertFalse(os.path.isdir(install_dir))
- self.assertFalse(os.path.exists(install_dir))
-
-
- def test_handle_unknown_package(self):
- """This test ensures that spack can at least do *some*
- operations with packages that are installed but that it
- does not know about. This is actually not such an uncommon
- scenario with spack; it can happen when you switch from a
- git branch where you're working on a new package.
-
- This test ensures that the directory layout stores enough
- information about installed packages' specs to uninstall
- or query them again if the package goes away.
- """
- mock_db = RepoPath(spack.mock_packages_path)
-
- not_in_mock = set.difference(
- set(spack.repo.all_package_names()),
- set(mock_db.all_package_names()))
- packages = list(not_in_mock)[:max_packages]
-
- # Create all the packages that are not in mock.
- installed_specs = {}
- for pkg_name in packages:
- spec = spack.repo.get(pkg_name).spec
-
- # If a spec fails to concretize, just skip it. If it is a
- # real error, it will be caught by concretization tests.
- try:
- spec.concretize()
- except:
- continue
-
- self.layout.create_install_directory(spec)
- installed_specs[spec] = self.layout.path_for_spec(spec)
-
- spack.repo.swap(mock_db)
-
- # Now check that even without the package files, we know
- # enough to read a spec from the spec file.
- for spec, path in installed_specs.items():
- spec_from_file = self.layout.read_spec(
- join_path(path, '.spack', 'spec.yaml'))
-
- # To satisfy these conditions, directory layouts need to
- # read in concrete specs from their install dirs somehow.
- self.assertEqual(path, self.layout.path_for_spec(spec_from_file))
- self.assertEqual(spec, spec_from_file)
- self.assertTrue(spec.eq_dag(spec_from_file))
- self.assertEqual(spec.dag_hash(), spec_from_file.dag_hash())
-
- spack.repo.swap(mock_db)
-
-
- def test_find(self):
- """Test that finding specs within an install layout works."""
- packages = list(spack.repo.all_packages())[:max_packages]
-
- # Create install prefixes for all packages in the list
- installed_specs = {}
- for pkg in packages:
- if pkg.name.startswith('external'):
- #External package tests cannot be installed
- continue
- spec = pkg.spec.concretized()
- installed_specs[spec.name] = spec
- self.layout.create_install_directory(spec)
-
- # Make sure all the installed specs appear in DirectoryLayout.all_specs()
- found_specs = dict((s.name, s) for s in self.layout.all_specs())
- for name, spec in found_specs.items():
- self.assertTrue(name in found_specs)
- self.assertTrue(found_specs[name].eq_dag(spec))
+@pytest.fixture()
+def layout_and_dir(tmpdir):
+ """Returns a directory layout and the corresponding directory."""
+ yield YamlDirectoryLayout(str(tmpdir)), str(tmpdir)
+
+
+def test_read_and_write_spec(
+ layout_and_dir, config, builtin_mock
+):
+ """This goes through each package in spack and creates a directory for
+ it. It then ensures that the spec for the directory's
+ installed package can be read back in consistently, and
+ finally that the directory can be removed by the directory
+ layout.
+ """
+ layout, tmpdir = layout_and_dir
+ packages = list(spack.repo.all_packages())[:max_packages]
+
+ for pkg in packages:
+ if pkg.name.startswith('external'):
+ # External package tests cannot be installed
+ continue
+ spec = pkg.spec
+
+ # If a spec fails to concretize, just skip it. If it is a
+ # real error, it will be caught by concretization tests.
+ try:
+ spec.concretize()
+ except Exception:
+ continue
+
+ layout.create_install_directory(spec)
+
+ install_dir = layout.path_for_spec(spec)
+ spec_path = layout.spec_file_path(spec)
+
+ # Ensure directory has been created in right place.
+ assert os.path.isdir(install_dir)
+ assert install_dir.startswith(str(tmpdir))
+
+ # Ensure spec file exists when directory is created
+ assert os.path.isfile(spec_path)
+ assert spec_path.startswith(install_dir)
+
+ # Make sure spec file can be read back in to get the original spec
+ spec_from_file = layout.read_spec(spec_path)
+
+ # currently we don't store build dependency information when
+ # we write out specs to the filesystem.
+
+ # TODO: fix this when we can concretize more loosely based on
+ # TODO: what is installed. We currently omit these to
+ # TODO: increase reuse of build dependencies.
+ stored_deptypes = ('link', 'run')
+ expected = spec.copy(deps=stored_deptypes)
+ assert expected == spec_from_file
+ assert expected.eq_dag(spec_from_file)
+ assert spec_from_file.concrete
+
+ # Ensure that specs that come out "normal" are really normal.
+ with open(spec_path) as spec_file:
+ read_separately = Spec.from_yaml(spec_file.read())
+
+ # TODO: revise this when build deps are in dag_hash
+ norm = read_separately.normalized().copy(deps=stored_deptypes)
+ assert norm == spec_from_file
+
+ # TODO: revise this when build deps are in dag_hash
+ conc = read_separately.concretized().copy(deps=stored_deptypes)
+ assert conc == spec_from_file
+
+ # Make sure the hash of the read-in spec is the same
+ assert expected.dag_hash() == spec_from_file.dag_hash()
+
+ # Ensure directories are properly removed
+ layout.remove_install_directory(spec)
+ assert not os.path.isdir(install_dir)
+ assert not os.path.exists(install_dir)
+
+
+def test_handle_unknown_package(
+ layout_and_dir, config, builtin_mock
+):
+ """This test ensures that spack can at least do *some*
+ operations with packages that are installed but that it
+ does not know about. This is actually not such an uncommon
+ scenario with spack; it can happen when you switch from a
+ git branch where you're working on a new package.
+
+ This test ensures that the directory layout stores enough
+ information about installed packages' specs to uninstall
+ or query them again if the package goes away.
+ """
+ layout, _ = layout_and_dir
+ mock_db = RepoPath(spack.mock_packages_path)
+
+ not_in_mock = set.difference(
+ set(spack.repo.all_package_names()),
+ set(mock_db.all_package_names()))
+ packages = list(not_in_mock)[:max_packages]
+
+ # Create all the packages that are not in mock.
+ installed_specs = {}
+ for pkg_name in packages:
+ spec = spack.repo.get(pkg_name).spec
+
+ # If a spec fails to concretize, just skip it. If it is a
+ # real error, it will be caught by concretization tests.
+ try:
+ spec.concretize()
+ except Exception:
+ continue
+
+ layout.create_install_directory(spec)
+ installed_specs[spec] = layout.path_for_spec(spec)
+
+ spack.repo.swap(mock_db)
+
+ # Now check that even without the package files, we know
+ # enough to read a spec from the spec file.
+ for spec, path in installed_specs.items():
+ spec_from_file = layout.read_spec(
+ join_path(path, '.spack', 'spec.yaml')
+ )
+ # To satisfy these conditions, directory layouts need to
+ # read in concrete specs from their install dirs somehow.
+ assert path == layout.path_for_spec(spec_from_file)
+ assert spec == spec_from_file
+ assert spec.eq_dag(spec_from_file)
+ assert spec.dag_hash() == spec_from_file.dag_hash()
+
+ spack.repo.swap(mock_db)
+
+
+def test_find(layout_and_dir, config, builtin_mock):
+ """Test that finding specs within an install layout works."""
+ layout, _ = layout_and_dir
+ packages = list(spack.repo.all_packages())[:max_packages]
+
+ # Create install prefixes for all packages in the list
+ installed_specs = {}
+ for pkg in packages:
+ if pkg.name.startswith('external'):
+ # External package tests cannot be installed
+ continue
+ spec = pkg.spec.concretized()
+ installed_specs[spec.name] = spec
+ layout.create_install_directory(spec)
+
+ # Make sure all the installed specs appear in
+ # DirectoryLayout.all_specs()
+ found_specs = dict((s.name, s) for s in layout.all_specs())
+ for name, spec in found_specs.items():
+ assert name in found_specs
+ assert found_specs[name].eq_dag(spec)
diff --git a/lib/spack/spack/test/environment.py b/lib/spack/spack/test/environment.py
index ded1539e18..e9f0a5182f 100644
--- a/lib/spack/spack/test/environment.py
+++ b/lib/spack/spack/test/environment.py
@@ -24,16 +24,26 @@
##############################################################################
import unittest
import os
+
+from spack import spack_root
+from llnl.util.filesystem import join_path
from spack.environment import EnvironmentModifications
+from spack.environment import SetEnv, UnsetEnv
+from spack.environment import RemovePath, PrependPath, AppendPath
+from spack.util.environment import filter_system_paths, filter_system_bin_paths
class EnvironmentTest(unittest.TestCase):
+
def setUp(self):
- os.environ.clear()
os.environ['UNSET_ME'] = 'foo'
os.environ['EMPTY_PATH_LIST'] = ''
os.environ['PATH_LIST'] = '/path/second:/path/third'
- os.environ['REMOVE_PATH_LIST'] = '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
+ os.environ['REMOVE_PATH_LIST'] = \
+ '/a/b:/duplicate:/a/c:/remove/this:/a/d:/duplicate/:/f/g'
+
+ def tearDown(self):
+ pass
def test_set(self):
env = EnvironmentModifications()
@@ -50,6 +60,44 @@ class EnvironmentTest(unittest.TestCase):
env.apply_modifications()
self.assertRaises(KeyError, os.environ.__getitem__, 'UNSET_ME')
+ def test_filter_system_paths(self):
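+ # filter_system_paths should drop bare system prefixes (/, /usr, /usr/local,
+ # /lib, ...) and keep only package-specific directories.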
+ filtered = filter_system_paths([
+ '/usr/local/Cellar/gcc/5.3.0/lib',
+ '/usr/local/lib',
+ '/usr/local',
+ '/usr/local/include',
+ '/usr/local/lib64',
+ '/usr/local/opt/some-package/lib',
+ '/usr/opt/lib',
+ '/lib',
+ '/',
+ '/usr',
+ '/lib64',
+ '/include',
+ '/opt/some-package/include',
+ ])
+ self.assertEqual(filtered,
+ ['/usr/local/Cellar/gcc/5.3.0/lib',
+ '/usr/local/opt/some-package/lib',
+ '/usr/opt/lib',
+ '/opt/some-package/include'])
+
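+ # filter_system_bin_paths keeps every entry but moves system bin
+ # directories (e.g. /bin, /usr/local/bin) to the front of the list.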
+ filtered = filter_system_bin_paths([
+ '/usr/local/Cellar/gcc/5.3.0/bin',
+ '/usr/local/bin',
+ '/usr/local/opt/some-package/bin',
+ '/usr/opt/bin',
+ '/bin',
+ '/opt/some-package/bin',
+ ])
+ self.assertEqual(filtered,
+ ['/usr/local/bin',
+ '/bin',
+ '/usr/local/Cellar/gcc/5.3.0/bin',
+ '/usr/local/opt/some-package/bin',
+ '/usr/opt/bin',
+ '/opt/some-package/bin'])
+
def test_set_path(self):
env = EnvironmentModifications()
env.set_path('A', ['foo', 'bar', 'baz'])
@@ -74,9 +122,18 @@ class EnvironmentTest(unittest.TestCase):
env.remove_path('REMOVE_PATH_LIST', '/duplicate/')
env.apply_modifications()
- self.assertEqual('/path/first:/path/second:/path/third:/path/last', os.environ['PATH_LIST'])
- self.assertEqual('/path/first:/path/middle:/path/last', os.environ['EMPTY_PATH_LIST'])
- self.assertEqual('/path/first:/path/middle:/path/last', os.environ['NEWLY_CREATED_PATH_LIST'])
+ self.assertEqual(
+ '/path/first:/path/second:/path/third:/path/last',
+ os.environ['PATH_LIST']
+ )
+ self.assertEqual(
+ '/path/first:/path/middle:/path/last',
+ os.environ['EMPTY_PATH_LIST']
+ )
+ self.assertEqual(
+ '/path/first:/path/middle:/path/last',
+ os.environ['NEWLY_CREATED_PATH_LIST']
+ )
self.assertEqual('/a/b:/a/c:/a/d:/f/g', os.environ['REMOVE_PATH_LIST'])
def test_extra_arguments(self):
@@ -95,3 +152,52 @@ class EnvironmentTest(unittest.TestCase):
self.assertEqual(len(copy_construct), 2)
for x, y in zip(env, copy_construct):
assert x is y
+
+ def test_source_files(self):
+ datadir = join_path(spack_root, 'lib', 'spack',
+ 'spack', 'test', 'data')
+ files = [
+ join_path(datadir, 'sourceme_first.sh'),
+ join_path(datadir, 'sourceme_second.sh'),
+ join_path(datadir, 'sourceme_parameters.sh intel64')
+ ]
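+ # from_sourcing_files should source each file (parameters included) and
+ # record the resulting environment changes as modification objects.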
+ env = EnvironmentModifications.from_sourcing_files(*files)
+ modifications = env.group_by_name()
+
+ # This is sensitive to the user's environment; can include
+ # spurious entries for things like PS1
+ #
+ # TODO: figure out how to make this a bit more robust.
+ self.assertTrue(len(modifications) >= 4)
+
+ # Set new variables
+ self.assertEqual(len(modifications['NEW_VAR']), 1)
+ self.assertTrue(isinstance(modifications['NEW_VAR'][0], SetEnv))
+ self.assertEqual(modifications['NEW_VAR'][0].value, 'new')
+
+ self.assertEqual(len(modifications['FOO']), 1)
+ self.assertTrue(isinstance(modifications['FOO'][0], SetEnv))
+ self.assertEqual(modifications['FOO'][0].value, 'intel64')
+
+ # Unset variables
+ self.assertEqual(len(modifications['EMPTY_PATH_LIST']), 1)
+ self.assertTrue(isinstance(
+ modifications['EMPTY_PATH_LIST'][0], UnsetEnv))
+ # Modified variables
+ self.assertEqual(len(modifications['UNSET_ME']), 1)
+ self.assertTrue(isinstance(modifications['UNSET_ME'][0], SetEnv))
+ self.assertEqual(modifications['UNSET_ME'][0].value, 'overridden')
+
+ self.assertEqual(len(modifications['PATH_LIST']), 3)
+ self.assertTrue(
+ isinstance(modifications['PATH_LIST'][0], RemovePath)
+ )
+ self.assertEqual(modifications['PATH_LIST'][0].value, '/path/third')
+ self.assertTrue(
+ isinstance(modifications['PATH_LIST'][1], AppendPath)
+ )
+ self.assertEqual(modifications['PATH_LIST'][1].value, '/path/fourth')
+ self.assertTrue(
+ isinstance(modifications['PATH_LIST'][2], PrependPath)
+ )
+ self.assertEqual(modifications['PATH_LIST'][2].value, '/path/first')
diff --git a/lib/spack/spack/test/file_cache.py b/lib/spack/spack/test/file_cache.py
new file mode 100644
index 0000000000..cc66beda2e
--- /dev/null
+++ b/lib/spack/spack/test/file_cache.py
@@ -0,0 +1,83 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""
+Test Spack's FileCache.
+"""
+import os
+import shutil
+import tempfile
+import unittest
+
+from spack.file_cache import FileCache
+
+
+class FileCacheTest(unittest.TestCase):
+ """Ensure that a file cache can properly write to a file and recover its
+ contents."""
+
+ def setUp(self):
+ self.scratch_dir = tempfile.mkdtemp()
+ self.cache = FileCache(self.scratch_dir)
+
+ def tearDown(self):
+ shutil.rmtree(self.scratch_dir)
+
+ def test_write_and_read_cache_file(self):
+ """Test writing then reading a cached file."""
+ with self.cache.write_transaction('test.yaml') as (old, new):
+ self.assertTrue(old is None)
+ self.assertTrue(new is not None)
+ new.write("foobar\n")
+
+ with self.cache.read_transaction('test.yaml') as stream:
+ text = stream.read()
+ self.assertEqual("foobar\n", text)
+
+ def test_remove(self):
+ """Test removing an entry from the cache."""
+ self.test_write_and_write_cache_file()
+
+ self.cache.remove('test.yaml')
+
+ self.assertFalse(os.path.exists(self.cache.cache_path('test.yaml')))
+ self.assertFalse(os.path.exists(self.cache._lock_path('test.yaml')))
+
+ def test_write_and_write_cache_file(self):
+ """Test two write transactions on a cached file."""
+ with self.cache.write_transaction('test.yaml') as (old, new):
+ self.assertTrue(old is None)
+ self.assertTrue(new is not None)
+ new.write("foobar\n")
+
+ with self.cache.write_transaction('test.yaml') as (old, new):
+ self.assertTrue(old is not None)
+ text = old.read()
+ self.assertEqual("foobar\n", text)
+ self.assertTrue(new is not None)
+ new.write("barbaz\n")
+
+ with self.cache.read_transaction('test.yaml') as stream:
+ text = stream.read()
+ self.assertEqual("barbaz\n", text)
diff --git a/lib/spack/spack/test/git_fetch.py b/lib/spack/spack/test/git_fetch.py
index 4de65760d7..3bd998c5c2 100644
--- a/lib/spack/spack/test/git_fetch.py
+++ b/lib/spack/spack/test/git_fetch.py
@@ -24,96 +24,61 @@
##############################################################################
import os
+import pytest
import spack
from llnl.util.filesystem import *
-from spack.test.mock_packages_test import *
-from spack.test.mock_repo import MockGitRepo
+from spack.spec import Spec
from spack.version import ver
-class GitFetchTest(MockPackagesTest):
- """Tests fetching from a dummy git repository."""
-
- def setUp(self):
- """Create a git repository with master and two other branches,
- and one tag, so that we can experiment on it."""
- super(GitFetchTest, self).setUp()
-
- self.repo = MockGitRepo()
-
- spec = Spec('git-test')
- spec.concretize()
- self.pkg = spack.repo.get(spec, new=True)
-
- def tearDown(self):
- """Destroy the stage space used by this test."""
- super(GitFetchTest, self).tearDown()
- self.repo.destroy()
-
- def assert_rev(self, rev):
- """Check that the current git revision is equal to the supplied rev."""
- self.assertEqual(self.repo.rev_hash('HEAD'), self.repo.rev_hash(rev))
-
- def try_fetch(self, rev, test_file, args):
- """Tries to:
- 1. Fetch the repo using a fetch strategy constructed with
- supplied args.
- 2. Check if the test_file is in the checked out repository.
- 3. Assert that the repository is at the revision supplied.
- 4. Add and remove some files, then reset the repo, and
- ensure it's all there again.
- """
- self.pkg.versions[ver('git')] = args
-
- with self.pkg.stage:
- self.pkg.do_stage()
- self.assert_rev(rev)
-
- file_path = join_path(self.pkg.stage.source_path, test_file)
- self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
- self.assertTrue(os.path.isfile(file_path))
-
- os.unlink(file_path)
- self.assertFalse(os.path.isfile(file_path))
-
- untracked_file = 'foobarbaz'
- touch(untracked_file)
- self.assertTrue(os.path.isfile(untracked_file))
- self.pkg.do_restage()
- self.assertFalse(os.path.isfile(untracked_file))
-
- self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
- self.assertTrue(os.path.isfile(file_path))
-
- self.assert_rev(rev)
-
-
- def test_fetch_master(self):
- """Test a default git checkout with no commit or tag specified."""
- self.try_fetch('master', self.repo.r0_file, {
- 'git' : self.repo.path
- })
-
-
- def test_fetch_branch(self):
- """Test fetching a branch."""
- self.try_fetch(self.repo.branch, self.repo.branch_file, {
- 'git' : self.repo.path,
- 'branch' : self.repo.branch
- })
-
-
- def test_fetch_tag(self):
- """Test fetching a tag."""
- self.try_fetch(self.repo.tag, self.repo.tag_file, {
- 'git' : self.repo.path,
- 'tag' : self.repo.tag
- })
-
-
- def test_fetch_commit(self):
- """Test fetching a particular commit."""
- self.try_fetch(self.repo.r1, self.repo.r1_file, {
- 'git' : self.repo.path,
- 'commit' : self.repo.r1
- })
+@pytest.fixture(params=['master', 'branch', 'tag', 'commit'])
+def type_of_test(request):
+ """Returns one of the test type available for the mock_git_repository"""
+ return request.param
+
+
+def test_fetch(
+ type_of_test,
+ mock_git_repository,
+ config,
+ refresh_builtin_mock
+):
+ """Tries to:
+
+ 1. Fetch the repo using a fetch strategy constructed with
+ supplied args (they depend on type_of_test).
+ 2. Check if the test_file is in the checked out repository.
+ 3. Assert that the repository is at the revision supplied.
+ 4. Add and remove some files, then reset the repo, and
+ ensure it's all there again.
+ """
+ # Retrieve the right test parameters
+ t = mock_git_repository.checks[type_of_test]
+ h = mock_git_repository.hash
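+ # 't' bundles the fetch args, the expected revision, and a file that
+ # should exist at that revision; 'h' resolves a git ref to its hash.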
+ # Construct the package under test
+ spec = Spec('git-test')
+ spec.concretize()
+ pkg = spack.repo.get(spec, new=True)
+ pkg.versions[ver('git')] = t.args
+ # Enter the stage directory and check some properties
+ with pkg.stage:
+ pkg.do_stage()
+ assert h('HEAD') == h(t.revision)
+
+ file_path = join_path(pkg.stage.source_path, t.file)
+ assert os.path.isdir(pkg.stage.source_path)
+ assert os.path.isfile(file_path)
+
+ os.unlink(file_path)
+ assert not os.path.isfile(file_path)
+
+ untracked_file = 'foobarbaz'
+ touch(untracked_file)
+ assert os.path.isfile(untracked_file)
+ pkg.do_restage()
+ assert not os.path.isfile(untracked_file)
+
+ assert os.path.isdir(pkg.stage.source_path)
+ assert os.path.isfile(file_path)
+
+ assert h('HEAD') == h(t.revision)
diff --git a/lib/spack/spack/test/hg_fetch.py b/lib/spack/spack/test/hg_fetch.py
index 292ffba949..71e4693c56 100644
--- a/lib/spack/spack/test/hg_fetch.py
+++ b/lib/spack/spack/test/hg_fetch.py
@@ -23,77 +23,62 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import spack
-from spack.version import ver
-from spack.test.mock_repo import MockHgRepo
+import pytest
+import spack
from llnl.util.filesystem import *
-from spack.test.mock_packages_test import *
-
-
-class HgFetchTest(MockPackagesTest):
- """Tests fetching from a dummy hg repository."""
-
- def setUp(self):
- """Create a hg repository with master and two other branches,
- and one tag, so that we can experiment on it."""
- super(HgFetchTest, self).setUp()
-
- self.repo = MockHgRepo()
-
- spec = Spec('hg-test')
- spec.concretize()
- self.pkg = spack.repo.get(spec, new=True)
-
- def tearDown(self):
- """Destroy the stage space used by this test."""
- super(HgFetchTest, self).tearDown()
- self.repo.destroy()
-
- def try_fetch(self, rev, test_file, args):
- """Tries to:
- 1. Fetch the repo using a fetch strategy constructed with
- supplied args.
- 2. Check if the test_file is in the checked out repository.
- 3. Assert that the repository is at the revision supplied.
- 4. Add and remove some files, then reset the repo, and
- ensure it's all there again.
- """
- self.pkg.versions[ver('hg')] = args
-
- with self.pkg.stage:
- self.pkg.do_stage()
- self.assertEqual(self.repo.get_rev(), rev)
-
- file_path = join_path(self.pkg.stage.source_path, test_file)
- self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
- self.assertTrue(os.path.isfile(file_path))
-
- os.unlink(file_path)
- self.assertFalse(os.path.isfile(file_path))
-
- untracked = 'foobarbaz'
- touch(untracked)
- self.assertTrue(os.path.isfile(untracked))
- self.pkg.do_restage()
- self.assertFalse(os.path.isfile(untracked))
-
- self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
- self.assertTrue(os.path.isfile(file_path))
-
- self.assertEqual(self.repo.get_rev(), rev)
-
-
- def test_fetch_default(self):
- """Test a default hg checkout with no commit or tag specified."""
- self.try_fetch(self.repo.r1, self.repo.r1_file, {
- 'hg' : self.repo.path
- })
+from spack.spec import Spec
+from spack.version import ver
- def test_fetch_rev0(self):
- """Test fetching a branch."""
- self.try_fetch(self.repo.r0, self.repo.r0_file, {
- 'hg' : self.repo.path,
- 'revision' : self.repo.r0
- })
+@pytest.fixture(params=['default', 'rev0'])
+def type_of_test(request):
+ """Returns one of the test type available for the mock_hg_repository"""
+ return request.param
+
+
+def test_fetch(
+ type_of_test,
+ mock_hg_repository,
+ config,
+ refresh_builtin_mock
+):
+ """Tries to:
+
+ 1. Fetch the repo using a fetch strategy constructed with
+ supplied args (they depend on type_of_test).
+ 2. Check if the test_file is in the checked out repository.
+ 3. Assert that the repository is at the revision supplied.
+ 4. Add and remove some files, then reset the repo, and
+ ensure it's all there again.
+ """
+ # Retrieve the right test parameters
+ t = mock_hg_repository.checks[type_of_test]
+ h = mock_hg_repository.hash
+ # Construct the package under test
+ spec = Spec('hg-test')
+ spec.concretize()
+ pkg = spack.repo.get(spec, new=True)
+ pkg.versions[ver('hg')] = t.args
+ # Enter the stage directory and check some properties
+ with pkg.stage:
+ pkg.do_stage()
+ assert h() == t.revision
+
+ file_path = join_path(pkg.stage.source_path, t.file)
+ assert os.path.isdir(pkg.stage.source_path)
+ assert os.path.isfile(file_path)
+
+ os.unlink(file_path)
+ assert not os.path.isfile(file_path)
+
+ untracked_file = 'foobarbaz'
+ touch(untracked_file)
+ assert os.path.isfile(untracked_file)
+ pkg.do_restage()
+ assert not os.path.isfile(untracked_file)
+
+ assert os.path.isdir(pkg.stage.source_path)
+ assert os.path.isfile(file_path)
+
+ assert h() == t.revision
diff --git a/lib/spack/spack/test/install.py b/lib/spack/spack/test/install.py
index cfe6ea9b27..f10c3a37e9 100644
--- a/lib/spack/spack/test/install.py
+++ b/lib/spack/spack/test/install.py
@@ -22,83 +22,83 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import shutil
-import tempfile
-
+import pytest
import spack
-from llnl.util.filesystem import *
+import spack.store
+from spack.database import Database
from spack.directory_layout import YamlDirectoryLayout
from spack.fetch_strategy import URLFetchStrategy, FetchStrategyComposite
-from spack.test.mock_packages_test import *
-from spack.test.mock_repo import MockArchive
-
-
-class InstallTest(MockPackagesTest):
- """Tests install and uninstall on a trivial package."""
-
- def setUp(self):
- super(InstallTest, self).setUp()
-
- # create a simple installable package directory and tarball
- self.repo = MockArchive()
-
- # We use a fake package, so skip the checksum.
- spack.do_checksum = False
-
- # Use a fake install directory to avoid conflicts bt/w
- # installed pkgs and mock packages.
- self.tmpdir = tempfile.mkdtemp()
- self.orig_layout = spack.install_layout
- spack.install_layout = YamlDirectoryLayout(self.tmpdir)
-
-
- def tearDown(self):
- super(InstallTest, self).tearDown()
- self.repo.destroy()
-
- # Turn checksumming back on
- spack.do_checksum = True
-
- # restore spack's layout.
- spack.install_layout = self.orig_layout
- shutil.rmtree(self.tmpdir, ignore_errors=True)
-
-
- def fake_fetchify(self, pkg):
- """Fake the URL for a package so it downloads from a file."""
- fetcher = FetchStrategyComposite()
- fetcher.append(URLFetchStrategy(self.repo.url))
- pkg.fetcher = fetcher
-
-
- def ztest_install_and_uninstall(self):
- # Get a basic concrete spec for the trivial install package.
- spec = Spec('trivial_install_test_package')
- spec.concretize()
- self.assertTrue(spec.concrete)
-
- # Get the package
- pkg = spack.repo.get(spec)
-
- self.fake_fetchify(pkg)
-
- try:
- pkg.do_install()
- pkg.do_uninstall()
- except Exception, e:
- pkg.remove_prefix()
- raise
-
-
- def test_install_environment(self):
- spec = Spec('cmake-client').concretized()
-
- for s in spec.traverse():
- self.fake_fetchify(s.package)
-
- pkg = spec.package
- try:
- pkg.do_install()
- except Exception, e:
- pkg.remove_prefix()
- raise
+from spack.spec import Spec
+
+
+@pytest.fixture()
+def install_mockery(tmpdir, config, builtin_mock):
+ """Hooks a fake install directory and a fake db into Spack."""
+ layout = spack.store.layout
+ db = spack.store.db
+ # Use a fake install directory to avoid conflicts between
+ # installed pkgs and mock packages.
+ spack.store.layout = YamlDirectoryLayout(str(tmpdir))
+ spack.store.db = Database(str(tmpdir))
+ # We use a fake package, so skip the checksum.
+ spack.do_checksum = False
+ yield
+ # Turn checksumming back on
+ spack.do_checksum = True
+ # Restore Spack's layout.
+ spack.store.layout = layout
+ spack.store.db = db
+
+
+def fake_fetchify(url, pkg):
+ """Fake the URL for a package so it downloads from a file."""
+ fetcher = FetchStrategyComposite()
+ fetcher.append(URLFetchStrategy(url))
+ pkg.fetcher = fetcher
+
+
+@pytest.mark.usefixtures('install_mockery')
+def test_install_and_uninstall(mock_archive):
+ # Get a basic concrete spec for the trivial install package.
+ spec = Spec('trivial-install-test-package')
+ spec.concretize()
+ assert spec.concrete
+
+ # Get the package
+ pkg = spack.repo.get(spec)
+
+ fake_fetchify(mock_archive.url, pkg)
+
+ try:
+ pkg.do_install()
+ pkg.do_uninstall()
+ except Exception:
+ pkg.remove_prefix()
+ raise
+
+
+@pytest.mark.usefixtures('install_mockery')
+def test_store(mock_archive):
+ spec = Spec('cmake-client').concretized()
+
+ for s in spec.traverse():
+ fake_fetchify(mock_archive.url, s.package)
+
+ pkg = spec.package
+ try:
+ pkg.do_install()
+ except Exception:
+ pkg.remove_prefix()
+ raise
+
+
+@pytest.mark.usefixtures('install_mockery')
+def test_failing_build(mock_archive):
+ spec = Spec('failing-build').concretized()
+
+ for s in spec.traverse():
+ fake_fetchify(mock_archive.url, s.package)
+
+ pkg = spec.package
+ with pytest.raises(spack.build_environment.ChildError):
+ pkg.do_install()
diff --git a/lib/spack/spack/test/library_list.py b/lib/spack/spack/test/library_list.py
new file mode 100644
index 0000000000..7fc2fd222f
--- /dev/null
+++ b/lib/spack/spack/test/library_list.py
@@ -0,0 +1,111 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import unittest
+
+from llnl.util.filesystem import LibraryList
+
+
+class LibraryListTest(unittest.TestCase):
+ def setUp(self):
+ l = [
+ '/dir1/liblapack.a',
+ '/dir2/libfoo.dylib',
+ '/dir1/libblas.a',
+ '/dir3/libbar.so',
+ 'libbaz.so'
+ ]
+ self.liblist = LibraryList(l)
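+ # LibraryList wraps an ordered list of library paths and derives names,
+ # directories, and search/link flags from it; concatenation drops duplicates.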
+
+ def test_repr(self):
+ x = eval(repr(self.liblist))
+ self.assertEqual(self.liblist, x)
+
+ def test_joined_and_str(self):
+ s1 = self.liblist.joined()
+ self.assertEqual(
+ s1,
+ '/dir1/liblapack.a /dir2/libfoo.dylib /dir1/libblas.a /dir3/libbar.so libbaz.so' # NOQA: ignore=E501
+ )
+ s2 = str(self.liblist)
+ self.assertEqual(s1, s2)
+ s3 = self.liblist.joined(';')
+ self.assertEqual(
+ s3,
+ '/dir1/liblapack.a;/dir2/libfoo.dylib;/dir1/libblas.a;/dir3/libbar.so;libbaz.so' # NOQA: ignore=E501
+ )
+
+ def test_flags(self):
+ search_flags = self.liblist.search_flags
+ self.assertTrue('-L/dir1' in search_flags)
+ self.assertTrue('-L/dir2' in search_flags)
+ self.assertTrue('-L/dir3' in search_flags)
+ self.assertTrue(isinstance(search_flags, str))
+
+ link_flags = self.liblist.link_flags
+ self.assertEqual(
+ link_flags,
+ '-llapack -lfoo -lblas -lbar -lbaz'
+ )
+
+ ld_flags = self.liblist.ld_flags
+ self.assertEqual(ld_flags, search_flags + ' ' + link_flags)
+
+ def test_paths_manipulation(self):
+ names = self.liblist.names
+ self.assertEqual(names, ['lapack', 'foo', 'blas', 'bar', 'baz'])
+
+ directories = self.liblist.directories
+ self.assertEqual(directories, ['/dir1', '/dir2', '/dir3'])
+
+ def test_get_item(self):
+ a = self.liblist[0]
+ self.assertEqual(a, '/dir1/liblapack.a')
+
+ b = self.liblist[:]
+ self.assertEqual(type(b), type(self.liblist))
+ self.assertEqual(self.liblist, b)
+ self.assertTrue(self.liblist is not b)
+
+ def test_add(self):
+ pylist = [
+ '/dir1/liblapack.a', # removed from the final list
+ '/dir2/libbaz.so',
+ '/dir4/libnew.a'
+ ]
+ another = LibraryList(pylist)
+ l = self.liblist + another
+ self.assertEqual(len(l), 7)
+ # Invariant : l == l + l
+ self.assertEqual(l, l + l)
+ # Always produce an instance of LibraryList
+ self.assertEqual(
+ type(self.liblist),
+ type(self.liblist + pylist)
+ )
+ self.assertEqual(
+ type(pylist + self.liblist),
+ type(self.liblist)
+ )
diff --git a/lib/spack/spack/test/link_tree.py b/lib/spack/spack/test/link_tree.py
index de40991b57..5d0a7430b6 100644
--- a/lib/spack/spack/test/link_tree.py
+++ b/lib/spack/spack/test/link_tree.py
@@ -53,16 +53,13 @@ class LinkTreeTest(unittest.TestCase):
def tearDown(self):
self.stage.destroy()
-
def check_file_link(self, filename):
self.assertTrue(os.path.isfile(filename))
self.assertTrue(os.path.islink(filename))
-
def check_dir(self, filename):
self.assertTrue(os.path.isdir(filename))
-
def test_merge_to_new_directory(self):
with working_dir(self.stage.path):
self.link_tree.merge('dest')
@@ -79,7 +76,6 @@ class LinkTreeTest(unittest.TestCase):
self.assertFalse(os.path.exists('dest'))
-
def test_merge_to_existing_directory(self):
with working_dir(self.stage.path):
@@ -112,7 +108,6 @@ class LinkTreeTest(unittest.TestCase):
self.assertFalse(os.path.isfile('dest/c/d/6'))
self.assertFalse(os.path.isfile('dest/c/d/e/7'))
-
def test_merge_with_empty_directories(self):
with working_dir(self.stage.path):
mkdirp('dest/f/g')
@@ -132,7 +127,6 @@ class LinkTreeTest(unittest.TestCase):
self.assertTrue(os.path.isdir('dest/a/b/h'))
self.assertTrue(os.path.isdir('dest/f/g'))
-
def test_ignore(self):
with working_dir(self.stage.path):
touchp('source/.spec')
diff --git a/lib/spack/spack/test/lock.py b/lib/spack/spack/test/lock.py
index 0e9f6daf4d..4f62cd85e9 100644
--- a/lib/spack/spack/test/lock.py
+++ b/lib/spack/spack/test/lock.py
@@ -25,6 +25,7 @@
"""
These tests ensure that our lock works correctly.
"""
+import os
import shutil
import tempfile
import unittest
@@ -44,107 +45,256 @@ class LockTest(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp()
self.lock_path = join_path(self.tempdir, 'lockfile')
- touch(self.lock_path)
-
def tearDown(self):
- shutil.rmtree(self.tempdir, ignore_errors=True)
-
+ shutil.rmtree(self.tempdir, ignore_errors=True)
def multiproc_test(self, *functions):
"""Order some processes using simple barrier synchronization."""
b = Barrier(len(functions), timeout=barrier_timeout)
procs = [Process(target=f, args=(b,)) for f in functions]
- for p in procs: p.start()
+
+ for p in procs:
+ p.start()
+
for p in procs:
p.join()
self.assertEqual(p.exitcode, 0)
-
#
# Process snippets below can be composed into tests.
#
- def acquire_write(self, barrier):
- lock = Lock(self.lock_path)
- lock.acquire_write() # grab exclusive lock
- barrier.wait()
- barrier.wait() # hold the lock until exception raises in other procs.
-
- def acquire_read(self, barrier):
- lock = Lock(self.lock_path)
- lock.acquire_read() # grab shared lock
- barrier.wait()
- barrier.wait() # hold the lock until exception raises in other procs.
-
- def timeout_write(self, barrier):
- lock = Lock(self.lock_path)
- barrier.wait() # wait for lock acquire in first process
- self.assertRaises(LockError, lock.acquire_write, 0.1)
- barrier.wait()
-
- def timeout_read(self, barrier):
- lock = Lock(self.lock_path)
- barrier.wait() # wait for lock acquire in first process
- self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait()
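+ # The optional (start, length) arguments below select a byte range within
+ # the lock file; the range tests rely on non-overlapping ranges being
+ # lockable independently.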
+ def acquire_write(self, start=0, length=0):
+ def fn(barrier):
+ lock = Lock(self.lock_path, start, length)
+ lock.acquire_write() # grab exclusive lock
+ barrier.wait()
+ barrier.wait() # hold the lock until timeout in other procs.
+ return fn
+
+ def acquire_read(self, start=0, length=0):
+ def fn(barrier):
+ lock = Lock(self.lock_path, start, length)
+ lock.acquire_read() # grab shared lock
+ barrier.wait()
+ barrier.wait() # hold the lock until timeout in other procs.
+ return fn
+
+ def timeout_write(self, start=0, length=0):
+ def fn(barrier):
+ lock = Lock(self.lock_path, start, length)
+ barrier.wait() # wait for lock acquire in first process
+ self.assertRaises(LockError, lock.acquire_write, 0.1)
+ barrier.wait()
+ return fn
+ def timeout_read(self, start=0, length=0):
+ def fn(barrier):
+ lock = Lock(self.lock_path, start, length)
+ barrier.wait() # wait for lock acquire in first process
+ self.assertRaises(LockError, lock.acquire_read, 0.1)
+ barrier.wait()
+ return fn
#
# Test that exclusive locks on other processes time out when an
# exclusive lock is held.
#
def test_write_lock_timeout_on_write(self):
- self.multiproc_test(self.acquire_write, self.timeout_write)
+ self.multiproc_test(self.acquire_write(), self.timeout_write())
def test_write_lock_timeout_on_write_2(self):
- self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_write(), self.timeout_write(), self.timeout_write())
def test_write_lock_timeout_on_write_3(self):
- self.multiproc_test(self.acquire_write, self.timeout_write, self.timeout_write, self.timeout_write)
-
+ self.multiproc_test(
+ self.acquire_write(), self.timeout_write(), self.timeout_write(),
+ self.timeout_write())
+
+ def test_write_lock_timeout_on_write_ranges(self):
+ self.multiproc_test(
+ self.acquire_write(0, 1), self.timeout_write(0, 1))
+
+ def test_write_lock_timeout_on_write_ranges_2(self):
+ self.multiproc_test(
+ self.acquire_write(0, 64), self.acquire_write(65, 1),
+ self.timeout_write(0, 1), self.timeout_write(63, 1))
+
+ def test_write_lock_timeout_on_write_ranges_3(self):
+ self.multiproc_test(
+ self.acquire_write(0, 1), self.acquire_write(1, 1),
+ self.timeout_write(), self.timeout_write(), self.timeout_write())
+
+ def test_write_lock_timeout_on_write_ranges_4(self):
+ self.multiproc_test(
+ self.acquire_write(0, 1), self.acquire_write(1, 1),
+ self.acquire_write(2, 456), self.acquire_write(500, 64),
+ self.timeout_write(), self.timeout_write(), self.timeout_write())
#
# Test that shared locks on other processes time out when an
# exclusive lock is held.
#
def test_read_lock_timeout_on_write(self):
- self.multiproc_test(self.acquire_write, self.timeout_read)
+ self.multiproc_test(self.acquire_write(), self.timeout_read())
def test_read_lock_timeout_on_write_2(self):
- self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read)
+ self.multiproc_test(
+ self.acquire_write(), self.timeout_read(), self.timeout_read())
def test_read_lock_timeout_on_write_3(self):
- self.multiproc_test(self.acquire_write, self.timeout_read, self.timeout_read, self.timeout_read)
+ self.multiproc_test(
+ self.acquire_write(), self.timeout_read(), self.timeout_read(),
+ self.timeout_read())
+ def test_read_lock_timeout_on_write_ranges(self):
+ """small write lock, read whole file."""
+ self.multiproc_test(self.acquire_write(0, 1), self.timeout_read())
+
+ def test_read_lock_timeout_on_write_ranges_2(self):
+ """small write lock, small read lock"""
+ self.multiproc_test(self.acquire_write(0, 1), self.timeout_read(0, 1))
+
+ def test_read_lock_timeout_on_write_ranges_3(self):
+ """two write locks, overlapping read locks"""
+ self.multiproc_test(
+ self.acquire_write(0, 1), self.acquire_write(64, 128),
+ self.timeout_read(0, 1), self.timeout_read(128, 256))
#
# Test that exclusive locks time out when shared locks are held.
#
def test_write_lock_timeout_on_read(self):
- self.multiproc_test(self.acquire_read, self.timeout_write)
+ self.multiproc_test(self.acquire_read(), self.timeout_write())
def test_write_lock_timeout_on_read_2(self):
- self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read(), self.timeout_write(), self.timeout_write())
def test_write_lock_timeout_on_read_3(self):
- self.multiproc_test(self.acquire_read, self.timeout_write, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read(), self.timeout_write(), self.timeout_write(),
+ self.timeout_write())
+
+ def test_write_lock_timeout_on_read_ranges(self):
+ self.multiproc_test(self.acquire_read(0, 1), self.timeout_write())
+
+ def test_write_lock_timeout_on_read_ranges_2(self):
+ self.multiproc_test(self.acquire_read(0, 1), self.timeout_write(0, 1))
+
+ def test_write_lock_timeout_on_read_ranges_3(self):
+ self.multiproc_test(
+ self.acquire_read(0, 1), self.acquire_read(10, 1),
+ self.timeout_write(0, 1), self.timeout_write(10, 1))
+ def test_write_lock_timeout_on_read_ranges_4(self):
+ self.multiproc_test(
+ self.acquire_read(0, 64),
+ self.timeout_write(10, 1), self.timeout_write(32, 1))
+
+ def test_write_lock_timeout_on_read_ranges_5(self):
+ self.multiproc_test(
+ self.acquire_read(64, 128),
+ self.timeout_write(65, 1), self.timeout_write(127, 1),
+ self.timeout_write(90, 10))
#
     # Test that exclusive locks time out while lots of shared locks are held.
#
def test_write_lock_timeout_with_multiple_readers_2_1(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read(), self.acquire_read(), self.timeout_write())
def test_write_lock_timeout_with_multiple_readers_2_2(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read(), self.acquire_read(), self.timeout_write(),
+ self.timeout_write())
def test_write_lock_timeout_with_multiple_readers_3_1(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read(), self.acquire_read(), self.acquire_read(),
+ self.timeout_write())
def test_write_lock_timeout_with_multiple_readers_3_2(self):
- self.multiproc_test(self.acquire_read, self.acquire_read, self.acquire_read, self.timeout_write, self.timeout_write)
+ self.multiproc_test(
+ self.acquire_read(), self.acquire_read(), self.acquire_read(),
+ self.timeout_write(), self.timeout_write())
+
+ def test_write_lock_timeout_with_multiple_readers_2_1_ranges(self):
+ self.multiproc_test(
+ self.acquire_read(0, 10), self.acquire_read(5, 10),
+ self.timeout_write(5, 5))
+
+ def test_write_lock_timeout_with_multiple_readers_2_3_ranges(self):
+ self.multiproc_test(
+ self.acquire_read(0, 10), self.acquire_read(5, 15),
+ self.timeout_write(0, 1), self.timeout_write(11, 3),
+ self.timeout_write(7, 1))
+
+ def test_write_lock_timeout_with_multiple_readers_3_1_ranges(self):
+ self.multiproc_test(
+ self.acquire_read(0, 5), self.acquire_read(5, 5),
+ self.acquire_read(10, 5),
+ self.timeout_write(0, 15))
+
+ def test_write_lock_timeout_with_multiple_readers_3_2_ranges(self):
+ self.multiproc_test(
+ self.acquire_read(0, 5), self.acquire_read(5, 5),
+ self.acquire_read(10, 5),
+ self.timeout_write(3, 10), self.timeout_write(5, 1))
+ #
+ # Test that a read lock can be upgraded to a write lock.
+ #
+ def test_upgrade_read_to_write(self):
+ # ensure lock file exists the first time, so we open it read-only
+ # to begin with.
+ touch(self.lock_path)
+
+ lock = Lock(self.lock_path)
+ self.assertTrue(lock._reads == 0)
+ self.assertTrue(lock._writes == 0)
+
+ lock.acquire_read()
+ self.assertTrue(lock._reads == 1)
+ self.assertTrue(lock._writes == 0)
+ self.assertTrue(lock._file.mode == 'r+')
+
+ lock.acquire_write()
+ self.assertTrue(lock._reads == 1)
+ self.assertTrue(lock._writes == 1)
+ self.assertTrue(lock._file.mode == 'r+')
+
+ lock.release_write()
+ self.assertTrue(lock._reads == 1)
+ self.assertTrue(lock._writes == 0)
+ self.assertTrue(lock._file.mode == 'r+')
+
+ lock.release_read()
+ self.assertTrue(lock._reads == 0)
+ self.assertTrue(lock._writes == 0)
+ self.assertTrue(lock._file is None)
+
+ #
+ # Test that read-only file can be read-locked but not write-locked.
+ #
+ def test_upgrade_read_to_write_fails_with_readonly_file(self):
+ # ensure lock file exists the first time, so we open it read-only
+ # to begin with.
+ touch(self.lock_path)
+ os.chmod(self.lock_path, 0444)
+
+ lock = Lock(self.lock_path)
+ self.assertTrue(lock._reads == 0)
+ self.assertTrue(lock._writes == 0)
+
+ lock.acquire_read()
+ self.assertTrue(lock._reads == 1)
+ self.assertTrue(lock._writes == 0)
+ self.assertTrue(lock._file.mode == 'r')
+
+ self.assertRaises(LockError, lock.acquire_write)
#
# Longer test case that ensures locks are reusable. Ordering is
@@ -155,110 +305,283 @@ class LockTest(unittest.TestCase):
lock = Lock(self.lock_path)
lock.acquire_write()
- barrier.wait() # ---------------------------------------- 1
+ barrier.wait() # ---------------------------------------- 1
# others test timeout
- barrier.wait() # ---------------------------------------- 2
+ barrier.wait() # ---------------------------------------- 2
lock.release_write() # release and others acquire read
- barrier.wait() # ---------------------------------------- 3
+ barrier.wait() # ---------------------------------------- 3
self.assertRaises(LockError, lock.acquire_write, 0.1)
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 4
+ barrier.wait() # ---------------------------------------- 4
lock.release_read()
- barrier.wait() # ---------------------------------------- 5
+ barrier.wait() # ---------------------------------------- 5
# p2 upgrades read to write
- barrier.wait() # ---------------------------------------- 6
+ barrier.wait() # ---------------------------------------- 6
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 7
+ barrier.wait() # ---------------------------------------- 7
# p2 releases write and read
- barrier.wait() # ---------------------------------------- 8
+ barrier.wait() # ---------------------------------------- 8
# p3 acquires read
- barrier.wait() # ---------------------------------------- 9
+ barrier.wait() # ---------------------------------------- 9
# p3 upgrades read to write
- barrier.wait() # ---------------------------------------- 10
+ barrier.wait() # ---------------------------------------- 10
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 11
+ barrier.wait() # ---------------------------------------- 11
# p3 releases locks
- barrier.wait() # ---------------------------------------- 12
+ barrier.wait() # ---------------------------------------- 12
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 13
+ barrier.wait() # ---------------------------------------- 13
lock.release_read()
-
def p2(barrier):
lock = Lock(self.lock_path)
# p1 acquires write
- barrier.wait() # ---------------------------------------- 1
+ barrier.wait() # ---------------------------------------- 1
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 2
+ barrier.wait() # ---------------------------------------- 2
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 3
+ barrier.wait() # ---------------------------------------- 3
# p1 tests shared read
- barrier.wait() # ---------------------------------------- 4
+ barrier.wait() # ---------------------------------------- 4
# others release reads
- barrier.wait() # ---------------------------------------- 5
+ barrier.wait() # ---------------------------------------- 5
- lock.acquire_write() # upgrade read to write
- barrier.wait() # ---------------------------------------- 6
+ lock.acquire_write() # upgrade read to write
+ barrier.wait() # ---------------------------------------- 6
# others test timeout
- barrier.wait() # ---------------------------------------- 7
+ barrier.wait() # ---------------------------------------- 7
lock.release_write() # release read AND write (need both)
lock.release_read()
- barrier.wait() # ---------------------------------------- 8
+ barrier.wait() # ---------------------------------------- 8
# p3 acquires read
- barrier.wait() # ---------------------------------------- 9
+ barrier.wait() # ---------------------------------------- 9
# p3 upgrades read to write
- barrier.wait() # ---------------------------------------- 10
+ barrier.wait() # ---------------------------------------- 10
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 11
+ barrier.wait() # ---------------------------------------- 11
# p3 releases locks
- barrier.wait() # ---------------------------------------- 12
+ barrier.wait() # ---------------------------------------- 12
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 13
+ barrier.wait() # ---------------------------------------- 13
lock.release_read()
-
def p3(barrier):
lock = Lock(self.lock_path)
# p1 acquires write
- barrier.wait() # ---------------------------------------- 1
+ barrier.wait() # ---------------------------------------- 1
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 2
+ barrier.wait() # ---------------------------------------- 2
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 3
+ barrier.wait() # ---------------------------------------- 3
# p1 tests shared read
- barrier.wait() # ---------------------------------------- 4
+ barrier.wait() # ---------------------------------------- 4
lock.release_read()
- barrier.wait() # ---------------------------------------- 5
+ barrier.wait() # ---------------------------------------- 5
# p2 upgrades read to write
- barrier.wait() # ---------------------------------------- 6
+ barrier.wait() # ---------------------------------------- 6
self.assertRaises(LockError, lock.acquire_write, 0.1)
self.assertRaises(LockError, lock.acquire_read, 0.1)
- barrier.wait() # ---------------------------------------- 7
+ barrier.wait() # ---------------------------------------- 7
# p2 releases write & read
- barrier.wait() # ---------------------------------------- 8
+ barrier.wait() # ---------------------------------------- 8
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 9
+ barrier.wait() # ---------------------------------------- 9
lock.acquire_write()
- barrier.wait() # ---------------------------------------- 10
+ barrier.wait() # ---------------------------------------- 10
# others test timeout
- barrier.wait() # ---------------------------------------- 11
+ barrier.wait() # ---------------------------------------- 11
lock.release_read() # release read AND write in opposite
lock.release_write() # order from before on p2
- barrier.wait() # ---------------------------------------- 12
+ barrier.wait() # ---------------------------------------- 12
lock.acquire_read()
- barrier.wait() # ---------------------------------------- 13
+ barrier.wait() # ---------------------------------------- 13
lock.release_read()
self.multiproc_test(p1, p2, p3)
+
+ def test_transaction(self):
+ def enter_fn():
+ vals['entered'] = True
+
+ def exit_fn(t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ with ReadTransaction(lock, enter_fn, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ with WriteTransaction(lock, enter_fn, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+
+ def test_transaction_with_exception(self):
+ def enter_fn():
+ vals['entered'] = True
+
+ def exit_fn(t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+
+ def do_read_with_exception():
+ with ReadTransaction(lock, enter_fn, exit_fn):
+ raise Exception()
+
+ def do_write_with_exception():
+ with WriteTransaction(lock, enter_fn, exit_fn):
+ raise Exception()
+
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ self.assertRaises(Exception, do_read_with_exception)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+
+ vals = {'entered': False, 'exited': False, 'exception': False}
+ self.assertRaises(Exception, do_write_with_exception)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+
+ def test_transaction_with_context_manager(self):
+ class TestContextManager(object):
+
+ def __enter__(self):
+ vals['entered'] = True
+
+ def __exit__(self, t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ def exit_fn(t, v, tb):
+ vals['exited_fn'] = True
+ vals['exception_fn'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with ReadTransaction(lock, TestContextManager, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with ReadTransaction(lock, TestContextManager):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with WriteTransaction(lock, TestContextManager, exit_fn):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ with WriteTransaction(lock, TestContextManager):
+ pass
+
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertFalse(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ def test_transaction_with_context_manager_and_exception(self):
+ class TestContextManager(object):
+
+ def __enter__(self):
+ vals['entered'] = True
+
+ def __exit__(self, t, v, tb):
+ vals['exited'] = True
+ vals['exception'] = (t or v or tb)
+
+ def exit_fn(t, v, tb):
+ vals['exited_fn'] = True
+ vals['exception_fn'] = (t or v or tb)
+
+ lock = Lock(self.lock_path)
+
+ def do_read_with_exception(exit_fn):
+ with ReadTransaction(lock, TestContextManager, exit_fn):
+ raise Exception()
+
+ def do_write_with_exception(exit_fn):
+ with WriteTransaction(lock, TestContextManager, exit_fn):
+ raise Exception()
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_read_with_exception, exit_fn)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertTrue(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_read_with_exception, None)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_write_with_exception, exit_fn)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertTrue(vals['exited_fn'])
+ self.assertTrue(vals['exception_fn'])
+
+ vals = {'entered': False, 'exited': False, 'exited_fn': False,
+ 'exception': False, 'exception_fn': False}
+ self.assertRaises(Exception, do_write_with_exception, None)
+ self.assertTrue(vals['entered'])
+ self.assertTrue(vals['exited'])
+ self.assertTrue(vals['exception'])
+ self.assertFalse(vals['exited_fn'])
+ self.assertFalse(vals['exception_fn'])
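
The transaction tests above pin down the contract they rely on: a ReadTransaction or WriteTransaction takes a lock, an optional acquire callable (a plain function or a context-manager factory such as TestContextManager), and an optional exit callable that receives the (type, value, traceback) triple, while exceptions still propagate to the caller. A minimal standalone sketch with that contract, an illustration only and not the llnl.util.lock implementation, looks roughly like this:

    class ReadTransactionSketch(object):
        """Hypothetical stand-in that mirrors what the tests assert."""

        def __init__(self, lock, acquire=None, release=None):
            self._lock = lock          # object with acquire_read()/release_read()
            self._acquire = acquire    # callable or context-manager factory
            self._release = release    # callable taking (type, value, traceback)
            self._ctx = None

        def __enter__(self):
            self._lock.acquire_read()  # a WriteTransaction would acquire_write()
            if self._acquire is not None:
                result = self._acquire()
                if hasattr(result, '__enter__'):   # e.g. TestContextManager above
                    self._ctx = result
                    self._ctx.__enter__()

        def __exit__(self, t, v, tb):
            if self._ctx is not None:
                self._ctx.__exit__(t, v, tb)
            if self._release is not None:
                self._release(t, v, tb)            # sees the exception triple
            self._lock.release_read()
            return False                           # do not swallow exceptions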
diff --git a/lib/spack/spack/test/make_executable.py b/lib/spack/spack/test/make_executable.py
index b7a45a3f72..87a43a529a 100644
--- a/lib/spack/spack/test/make_executable.py
+++ b/lib/spack/spack/test/make_executable.py
@@ -38,6 +38,7 @@ from spack.util.environment import path_put_first
class MakeExecutableTest(unittest.TestCase):
+
def setUp(self):
self.tmpdir = tempfile.mkdtemp()
@@ -49,34 +50,30 @@ class MakeExecutableTest(unittest.TestCase):
path_put_first('PATH', [self.tmpdir])
-
def tearDown(self):
shutil.rmtree(self.tmpdir)
-
def test_make_normal(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(output=str).strip(), '-j8')
self.assertEqual(make('install', output=str).strip(), '-j8 install')
-
def test_make_explicit(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
-
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), '-j8 install')
def test_make_one_job(self):
make = MakeExecutable('make', 1)
self.assertEqual(make(output=str).strip(), '')
self.assertEqual(make('install', output=str).strip(), 'install')
-
def test_make_parallel_false(self):
make = MakeExecutable('make', 8)
self.assertEqual(make(parallel=False, output=str).strip(), '')
- self.assertEqual(make('install', parallel=False, output=str).strip(), 'install')
-
+ self.assertEqual(make('install', parallel=False,
+ output=str).strip(), 'install')
def test_make_parallel_disabled(self):
make = MakeExecutable('make', 8)
@@ -100,26 +97,29 @@ class MakeExecutableTest(unittest.TestCase):
del os.environ['SPACK_NO_PARALLEL_MAKE']
-
def test_make_parallel_precedence(self):
make = MakeExecutable('make', 8)
# These should work
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'true'
self.assertEqual(make(parallel=True, output=str).strip(), '')
- self.assertEqual(make('install', parallel=True, output=str).strip(), 'install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), 'install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = '1'
self.assertEqual(make(parallel=True, output=str).strip(), '')
- self.assertEqual(make('install', parallel=True, output=str).strip(), 'install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), 'install')
# These don't disable (false and random string)
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'false'
self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), '-j8 install')
os.environ['SPACK_NO_PARALLEL_MAKE'] = 'foobar'
self.assertEqual(make(parallel=True, output=str).strip(), '-j8')
- self.assertEqual(make('install', parallel=True, output=str).strip(), '-j8 install')
+ self.assertEqual(make('install', parallel=True,
+ output=str).strip(), '-j8 install')
del os.environ['SPACK_NO_PARALLEL_MAKE']
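
The rewrapped assertions above all reduce to one rule: -jN is emitted only when the job count is above one, parallel is not explicitly disabled, and SPACK_NO_PARALLEL_MAKE is not set to 'true' or '1' (any other value is ignored). A small self-contained sketch of that decision, using a hypothetical helper rather than Spack's MakeExecutable, is:

    import os

    def jobs_flags(jobs, parallel=True):
        """Return the make flags the tests above expect (illustrative only)."""
        env_value = os.environ.get('SPACK_NO_PARALLEL_MAKE', '').lower()
        disabled = env_value in ('true', '1')          # 'false'/'foobar' do not disable
        if jobs <= 1 or not parallel or disabled:
            return []                                  # plain `make install`
        return ['-j{0}'.format(jobs)]                  # e.g. ['-j8']

    # jobs_flags(8)                   -> ['-j8']
    # jobs_flags(8, parallel=False)   -> []
    # with SPACK_NO_PARALLEL_MAKE=true:  jobs_flags(8) -> []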
diff --git a/lib/spack/spack/test/mirror.py b/lib/spack/spack/test/mirror.py
index b682d4e097..e5e60e3045 100644
--- a/lib/spack/spack/test/mirror.py
+++ b/lib/spack/spack/test/mirror.py
@@ -22,123 +22,127 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import filecmp
import os
+import pytest
+
import spack
import spack.mirror
-
-from filecmp import dircmp
-from spack.test.mock_packages_test import *
-from spack.test.mock_repo import *
+import spack.util.executable
+from llnl.util.filesystem import join_path
+from spack.spec import Spec
+from spack.stage import Stage
# paths in repos that shouldn't be in the mirror tarballs.
exclude = ['.hg', '.git', '.svn']
-
-class MirrorTest(MockPackagesTest):
- def setUp(self):
- """Sets up a mock package and a mock repo for each fetch strategy, to
- ensure that the mirror can create archives for each of them.
- """
- super(MirrorTest, self).setUp()
- self.repos = {}
-
-
- def tearDown(self):
- """Destroy all the stages created by the repos in setup."""
- super(MirrorTest, self).tearDown()
- for repo in self.repos.values():
- repo.destroy()
- self.repos.clear()
-
-
- def set_up_package(self, name, MockRepoClass, url_attr):
- """Set up a mock package to be mirrored.
- Each package needs us to:
- 1. Set up a mock repo/archive to fetch from.
- 2. Point the package's version args at that repo.
- """
- # Set up packages to point at mock repos.
- spec = Spec(name)
- spec.concretize()
-
- # Get the package and fix its fetch args to point to a mock repo
- pkg = spack.repo.get(spec)
- repo = MockRepoClass()
- self.repos[name] = repo
-
- # change the fetch args of the first (only) version.
- assert(len(pkg.versions) == 1)
- v = next(iter(pkg.versions))
- pkg.versions[v][url_attr] = repo.url
-
-
- def check_mirror(self):
- with Stage('spack-mirror-test') as stage:
- mirror_root = join_path(stage.path, 'test-mirror')
-
- # register mirror with spack config
- mirrors = { 'spack-mirror-test' : 'file://' + mirror_root }
- spack.config.update_config('mirrors', mirrors)
-
-
- os.chdir(stage.path)
- spack.mirror.create(
- mirror_root, self.repos, no_checksum=True)
-
- # Stage directory exists
- self.assertTrue(os.path.isdir(mirror_root))
-
- # check that there are subdirs for each package
- for name in self.repos:
- subdir = join_path(mirror_root, name)
- self.assertTrue(os.path.isdir(subdir))
-
- files = os.listdir(subdir)
- self.assertEqual(len(files), 1)
-
- # Now try to fetch each package.
- for name, mock_repo in self.repos.items():
- spec = Spec(name).concretized()
- pkg = spec.package
-
- saved_checksum_setting = spack.do_checksum
- with pkg.stage:
- # Stage the archive from the mirror and cd to it.
- spack.do_checksum = False
- pkg.do_stage(mirror_only=True)
- # Compare the original repo with the expanded archive
- original_path = mock_repo.path
- if 'svn' in name:
- # have to check out the svn repo to compare.
- original_path = join_path(mock_repo.path, 'checked_out')
- svn('checkout', mock_repo.url, original_path)
- dcmp = dircmp(original_path, pkg.stage.source_path)
- # make sure there are no new files in the expanded tarball
- self.assertFalse(dcmp.right_only)
- # and that all original files are present.
- self.assertTrue(all(l in exclude for l in dcmp.left_only))
- spack.do_checksum = saved_checksum_setting
-
-
- def test_git_mirror(self):
- self.set_up_package('git-test', MockGitRepo, 'git')
- self.check_mirror()
-
- def test_svn_mirror(self):
- self.set_up_package('svn-test', MockSvnRepo, 'svn')
- self.check_mirror()
-
- def test_hg_mirror(self):
- self.set_up_package('hg-test', MockHgRepo, 'hg')
- self.check_mirror()
-
- def test_url_mirror(self):
- self.set_up_package('trivial_install_test_package', MockArchive, 'url')
- self.check_mirror()
-
- def test_all_mirror(self):
- self.set_up_package('git-test', MockGitRepo, 'git')
- self.set_up_package('svn-test', MockSvnRepo, 'svn')
- self.set_up_package('hg-test', MockHgRepo, 'hg')
- self.set_up_package('trivial_install_test_package', MockArchive, 'url')
- self.check_mirror()
+repos = {}
+svn = spack.util.executable.which('svn', required=True)
+
+
+def set_up_package(name, repository, url_attr):
+ """Set up a mock package to be mirrored.
+ Each package needs us to:
+
+ 1. Set up a mock repo/archive to fetch from.
+ 2. Point the package's version args at that repo.
+ """
+ # Set up packages to point at mock repos.
+ spec = Spec(name)
+ spec.concretize()
+ # Get the package and fix its fetch args to point to a mock repo
+ pkg = spack.repo.get(spec)
+
+ repos[name] = repository
+
+ # change the fetch args of the first (only) version.
+ assert len(pkg.versions) == 1
+ v = next(iter(pkg.versions))
+
+ pkg.versions[v][url_attr] = repository.url
+
+
+def check_mirror():
+ with Stage('spack-mirror-test') as stage:
+ mirror_root = join_path(stage.path, 'test-mirror')
+ # register mirror with spack config
+ mirrors = {'spack-mirror-test': 'file://' + mirror_root}
+ spack.config.update_config('mirrors', mirrors)
+
+ os.chdir(stage.path)
+ spack.mirror.create(
+ mirror_root, repos, no_checksum=True
+ )
+
+ # Stage directory exists
+ assert os.path.isdir(mirror_root)
+
+ # check that there are subdirs for each package
+ for name in repos:
+ subdir = join_path(mirror_root, name)
+ assert os.path.isdir(subdir)
+
+ files = os.listdir(subdir)
+ assert len(files) == 1
+
+ # Now try to fetch each package.
+ for name, mock_repo in repos.items():
+ spec = Spec(name).concretized()
+ pkg = spec.package
+
+ saved_checksum_setting = spack.do_checksum
+ with pkg.stage:
+ # Stage the archive from the mirror and cd to it.
+ spack.do_checksum = False
+ pkg.do_stage(mirror_only=True)
+ # Compare the original repo with the expanded archive
+ original_path = mock_repo.path
+ if 'svn' in name:
+ # have to check out the svn repo to compare.
+ original_path = join_path(
+ mock_repo.path, 'checked_out')
+ svn('checkout', mock_repo.url, original_path)
+ dcmp = filecmp.dircmp(original_path, pkg.stage.source_path)
+ # make sure there are no new files in the expanded
+ # tarball
+ assert not dcmp.right_only
+ # and that all original files are present.
+ assert all(l in exclude for l in dcmp.left_only)
+ spack.do_checksum = saved_checksum_setting
+
+
+@pytest.mark.usefixtures('config', 'refresh_builtin_mock')
+class TestMirror(object):
+ def test_git_mirror(self, mock_git_repository):
+ set_up_package('git-test', mock_git_repository, 'git')
+ check_mirror()
+ repos.clear()
+
+ def test_svn_mirror(self, mock_svn_repository):
+ set_up_package('svn-test', mock_svn_repository, 'svn')
+ check_mirror()
+ repos.clear()
+
+ def test_hg_mirror(self, mock_hg_repository):
+ set_up_package('hg-test', mock_hg_repository, 'hg')
+ check_mirror()
+ repos.clear()
+
+ def test_url_mirror(self, mock_archive):
+ set_up_package('trivial-install-test-package', mock_archive, 'url')
+ check_mirror()
+ repos.clear()
+
+ def test_all_mirror(
+ self,
+ mock_git_repository,
+ mock_svn_repository,
+ mock_hg_repository,
+ mock_archive,
+ ):
+ set_up_package('git-test', mock_git_repository, 'git')
+ set_up_package('svn-test', mock_svn_repository, 'svn')
+ set_up_package('hg-test', mock_hg_repository, 'hg')
+ set_up_package('trivial-install-test-package', mock_archive, 'url')
+ check_mirror()
+ repos.clear()
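
The conversion above replaces MirrorTest's setUp/tearDown and its bound helper methods with injected pytest fixtures (mock_git_repository, mock_archive, and friends, plus the config and refresh_builtin_mock fixtures applied to the class). The general shape of that pattern, with hypothetical names since the real fixtures live in Spack's conftest.py, is:

    import pytest

    @pytest.fixture()
    def mock_repository(tmpdir):
        """Create a throw-away repository directory; teardown runs after the yield."""
        repo = tmpdir.mkdir('mock-repo')   # setup (was setUp in the unittest version)
        yield repo                         # hand the resource to the test
        repo.remove()                      # teardown (was tearDown)

    def test_mirrors_repository(mock_repository):
        # pytest injects the fixture by matching the argument name
        assert mock_repository.check(dir=1)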
diff --git a/lib/spack/spack/test/mock_database.py b/lib/spack/spack/test/mock_database.py
deleted file mode 100644
index b1194f2451..0000000000
--- a/lib/spack/spack/test/mock_database.py
+++ /dev/null
@@ -1,104 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import shutil
-import tempfile
-
-import spack
-from spack.spec import Spec
-from spack.database import Database
-from spack.directory_layout import YamlDirectoryLayout
-from spack.test.mock_packages_test import MockPackagesTest
-
-
-class MockDatabase(MockPackagesTest):
- def _mock_install(self, spec):
- s = Spec(spec)
- s.concretize()
- pkg = spack.repo.get(s)
- pkg.do_install(fake=True)
-
- def _mock_remove(self, spec):
- specs = spack.installed_db.query(spec)
- assert len(specs) == 1
- spec = specs[0]
- spec.package.do_uninstall(spec)
-
- def setUp(self):
- super(MockDatabase, self).setUp()
- #
- # TODO: make the mockup below easier.
- #
-
- # Make a fake install directory
- self.install_path = tempfile.mkdtemp()
- self.spack_install_path = spack.install_path
- spack.install_path = self.install_path
-
- self.install_layout = YamlDirectoryLayout(self.install_path)
- self.spack_install_layout = spack.install_layout
- spack.install_layout = self.install_layout
-
- # Make fake database and fake install directory.
- self.installed_db = Database(self.install_path)
- self.spack_installed_db = spack.installed_db
- spack.installed_db = self.installed_db
-
- # make a mock database with some packages installed note that
- # the ref count for dyninst here will be 3, as it's recycled
- # across each install.
- #
- # Here is what the mock DB looks like:
- #
- # o mpileaks o mpileaks' o mpileaks''
- # |\ |\ |\
- # | o callpath | o callpath' | o callpath''
- # |/| |/| |/|
- # o | mpich o | mpich2 o | zmpi
- # | | o | fake
- # | | |
- # | |______________/
- # | .____________/
- # |/
- # o dyninst
- # |\
- # | o libdwarf
- # |/
- # o libelf
- #
-
- # Transaction used to avoid repeated writes.
- with spack.installed_db.write_transaction():
- self._mock_install('mpileaks ^mpich')
- self._mock_install('mpileaks ^mpich2')
- self._mock_install('mpileaks ^zmpi')
-
- def tearDown(self):
- for spec in spack.installed_db.query():
- spec.package.do_uninstall(spec)
- super(MockDatabase, self).tearDown()
- shutil.rmtree(self.install_path)
- spack.install_path = self.spack_install_path
- spack.install_layout = self.spack_install_layout
- spack.installed_db = self.spack_installed_db
diff --git a/lib/spack/spack/test/mock_packages_test.py b/lib/spack/spack/test/mock_packages_test.py
deleted file mode 100644
index 595667bf35..0000000000
--- a/lib/spack/spack/test/mock_packages_test.py
+++ /dev/null
@@ -1,133 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import os
-import shutil
-import tempfile
-import unittest
-
-import spack
-import spack.config
-from llnl.util.filesystem import mkdirp
-from ordereddict_backport import OrderedDict
-from spack.repository import RepoPath
-from spack.spec import Spec
-
-mock_compiler_config = """\
-compilers:
- all:
- clang@3.3:
- cc: /path/to/clang
- cxx: /path/to/clang++
- f77: None
- fc: None
- gcc@4.5.0:
- cc: /path/to/gcc
- cxx: /path/to/g++
- f77: /path/to/gfortran
- fc: /path/to/gfortran
-"""
-
-mock_packages_config = """\
-packages:
- externaltool:
- buildable: False
- paths:
- externaltool@1.0%gcc@4.5.0: /path/to/external_tool
- externalvirtual:
- buildable: False
- paths:
- externalvirtual@2.0%clang@3.3: /path/to/external_virtual_clang
- externalvirtual@1.0%gcc@4.5.0: /path/to/external_virtual_gcc
-"""
-
-class MockPackagesTest(unittest.TestCase):
- def initmock(self):
- # Use the mock packages database for these tests. This allows
- # us to set up contrived packages that don't interfere with
- # real ones.
- self.db = RepoPath(spack.mock_packages_path)
- spack.repo.swap(self.db)
-
- spack.config.clear_config_caches()
- self.real_scopes = spack.config.config_scopes
-
- # Mock up temporary configuration directories
- self.temp_config = tempfile.mkdtemp()
- self.mock_site_config = os.path.join(self.temp_config, 'site')
- self.mock_user_config = os.path.join(self.temp_config, 'user')
- mkdirp(self.mock_site_config)
- mkdirp(self.mock_user_config)
- for confs in [('compilers.yaml', mock_compiler_config), ('packages.yaml', mock_packages_config)]:
- conf_yaml = os.path.join(self.mock_site_config, confs[0])
- with open(conf_yaml, 'w') as f:
- f.write(confs[1])
-
- # TODO: Mocking this up is kind of brittle b/c ConfigScope
- # TODO: constructor modifies config_scopes. Make it cleaner.
- spack.config.config_scopes = OrderedDict()
- spack.config.ConfigScope('site', self.mock_site_config)
- spack.config.ConfigScope('user', self.mock_user_config)
-
- # Store changes to the package's dependencies so we can
- # restore later.
- self.saved_deps = {}
-
-
- def set_pkg_dep(self, pkg_name, spec):
- """Alters dependence information for a package.
-
- Adds a dependency on <spec> to pkg.
- Use this to mock up constraints.
- """
- spec = Spec(spec)
-
- # Save original dependencies before making any changes.
- pkg = spack.repo.get(pkg_name)
- if pkg_name not in self.saved_deps:
- self.saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
-
- # Change dep spec
- pkg.dependencies[spec.name] = { Spec(pkg_name) : spec }
-
-
- def cleanmock(self):
- """Restore the real packages path after any test."""
- spack.repo.swap(self.db)
- spack.config.config_scopes = self.real_scopes
- shutil.rmtree(self.temp_config, ignore_errors=True)
- spack.config.clear_config_caches()
-
- # Restore dependency changes that happened during the test
- for pkg_name, (pkg, deps) in self.saved_deps.items():
- pkg.dependencies.clear()
- pkg.dependencies.update(deps)
-
-
- def setUp(self):
- self.initmock()
-
-
- def tearDown(self):
- self.cleanmock()
diff --git a/lib/spack/spack/test/mock_repo.py b/lib/spack/spack/test/mock_repo.py
deleted file mode 100644
index a8098b8eec..0000000000
--- a/lib/spack/spack/test/mock_repo.py
+++ /dev/null
@@ -1,198 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import os
-import shutil
-
-from llnl.util.filesystem import *
-from spack.stage import Stage
-from spack.util.executable import which
-
-#
-# VCS Systems used by mock repo code.
-#
-git = which('git', required=True)
-svn = which('svn', required=True)
-svnadmin = which('svnadmin', required=True)
-hg = which('hg', required=True)
-tar = which('tar', required=True)
-
-
-class MockRepo(object):
- def __init__(self, stage_name, repo_name):
- """This creates a stage where some archive/repo files can be staged
- for testing spack's fetch strategies."""
- # Stage where this repo has been created
- self.stage = Stage(stage_name)
-
- # Full path to the repo within the stage.
- self.path = join_path(self.stage.path, repo_name)
- mkdirp(self.path)
-
-
- def destroy(self):
- """Destroy resources associated with this mock repo."""
- if self.stage:
- self.stage.destroy()
-
-
-class MockArchive(MockRepo):
- """Creates a very simple archive directory with a configure script and a
- makefile that installs to a prefix. Tars it up into an archive."""
-
- def __init__(self):
- repo_name = 'mock-archive-repo'
- super(MockArchive, self).__init__('mock-archive-stage', repo_name)
-
- with working_dir(self.path):
- configure = join_path(self.path, 'configure')
-
- with open(configure, 'w') as cfg_file:
- cfg_file.write(
- "#!/bin/sh\n"
- "prefix=$(echo $1 | sed 's/--prefix=//')\n"
- "cat > Makefile <<EOF\n"
- "all:\n"
- "\techo Building...\n\n"
- "install:\n"
- "\tmkdir -p $prefix\n"
- "\ttouch $prefix/dummy_file\n"
- "EOF\n")
- os.chmod(configure, 0755)
-
- with working_dir(self.stage.path):
- archive_name = "%s.tar.gz" % repo_name
- tar('-czf', archive_name, repo_name)
-
- self.archive_path = join_path(self.stage.path, archive_name)
- self.url = 'file://' + self.archive_path
-
-
-class MockVCSRepo(MockRepo):
- def __init__(self, stage_name, repo_name):
- """This creates a stage and a repo directory within the stage."""
- super(MockVCSRepo, self).__init__(stage_name, repo_name)
-
- # Name for rev0 & rev1 files in the repo to be
- self.r0_file = 'r0_file'
- self.r1_file = 'r1_file'
-
-
-class MockGitRepo(MockVCSRepo):
- def __init__(self):
- super(MockGitRepo, self).__init__('mock-git-stage', 'mock-git-repo')
-
- with working_dir(self.path):
- git('init')
-
- # r0 is just the first commit
- touch(self.r0_file)
- git('add', self.r0_file)
- git('commit', '-m', 'mock-git-repo r0')
-
- self.branch = 'test-branch'
- self.branch_file = 'branch_file'
- git('branch', self.branch)
-
- self.tag_branch = 'tag-branch'
- self.tag_file = 'tag_file'
- git('branch', self.tag_branch)
-
- # Check out first branch
- git('checkout', self.branch)
- touch(self.branch_file)
- git('add', self.branch_file)
- git('commit', '-m' 'r1 test branch')
-
- # Check out a second branch and tag it
- git('checkout', self.tag_branch)
- touch(self.tag_file)
- git('add', self.tag_file)
- git('commit', '-m' 'tag test branch')
-
- self.tag = 'test-tag'
- git('tag', self.tag)
-
- git('checkout', 'master')
-
- # R1 test is the same as test for branch
- self.r1 = self.rev_hash(self.branch)
- self.r1_file = self.branch_file
-
- self.url = self.path
-
- def rev_hash(self, rev):
- return git('rev-parse', rev, output=str).strip()
-
-
-class MockSvnRepo(MockVCSRepo):
- def __init__(self):
- super(MockSvnRepo, self).__init__('mock-svn-stage', 'mock-svn-repo')
-
- self.url = 'file://' + self.path
-
- with working_dir(self.stage.path):
- svnadmin('create', self.path)
-
- tmp_path = join_path(self.stage.path, 'tmp-path')
- mkdirp(tmp_path)
- with working_dir(tmp_path):
- touch(self.r0_file)
-
- svn('import', tmp_path, self.url, '-m', 'Initial import r0')
-
- shutil.rmtree(tmp_path)
- svn('checkout', self.url, tmp_path)
- with working_dir(tmp_path):
- touch(self.r1_file)
- svn('add', self.r1_file)
- svn('ci', '-m', 'second revision r1')
-
- shutil.rmtree(tmp_path)
-
- self.r0 = '1'
- self.r1 = '2'
-
-
-class MockHgRepo(MockVCSRepo):
- def __init__(self):
- super(MockHgRepo, self).__init__('mock-hg-stage', 'mock-hg-repo')
- self.url = 'file://' + self.path
-
- with working_dir(self.path):
- hg('init')
-
- touch(self.r0_file)
- hg('add', self.r0_file)
- hg('commit', '-m', 'revision 0', '-u', 'test')
- self.r0 = self.get_rev()
-
- touch(self.r1_file)
- hg('add', self.r1_file)
- hg('commit', '-m' 'revision 1', '-u', 'test')
- self.r1 = self.get_rev()
-
- def get_rev(self):
- """Get current mercurial revision."""
- return hg('id', '-i', output=str).strip()
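
For reference, the removed MockArchive boiled down to a tiny autotools-shaped tarball served over a file:// URL; an equivalent standalone sketch using only the standard library (not the fixture that replaces it) is:

    import os
    import stat
    import tarfile
    import tempfile

    def make_trivial_archive():
        """Build a tarball containing a fake `configure` that writes a Makefile."""
        stage = tempfile.mkdtemp()
        repo = os.path.join(stage, 'mock-archive-repo')
        os.mkdir(repo)
        configure = os.path.join(repo, 'configure')
        with open(configure, 'w') as f:
            f.write("#!/bin/sh\n"
                    "prefix=$(echo $1 | sed 's/--prefix=//')\n"
                    "cat > Makefile <<EOF\n"
                    "all:\n\techo Building...\n\n"
                    "install:\n\tmkdir -p $prefix\n\ttouch $prefix/dummy_file\nEOF\n")
        os.chmod(configure, os.stat(configure).st_mode | stat.S_IEXEC)
        archive = os.path.join(stage, 'mock-archive-repo.tar.gz')
        with tarfile.open(archive, 'w:gz') as tar:
            tar.add(repo, arcname='mock-archive-repo')
        return 'file://' + archive   # usable as a mock package's fetch URL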
diff --git a/lib/spack/spack/test/modules.py b/lib/spack/spack/test/modules.py
index 56e294de26..4f35df1982 100644
--- a/lib/spack/spack/test/modules.py
+++ b/lib/spack/spack/test/modules.py
@@ -23,159 +23,476 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import collections
-from contextlib import contextmanager
+import contextlib
-import StringIO
+import cStringIO
+import pytest
import spack.modules
-from spack.test.mock_packages_test import MockPackagesTest
+import spack.spec
-FILE_REGISTRY = collections.defaultdict(StringIO.StringIO)
+# Our "filesystem" for the tests below
+FILE_REGISTRY = collections.defaultdict(cStringIO.StringIO)
+# Spec strings that will be used throughout the tests
+mpich_spec_string = 'mpich@3.0.4'
+mpileaks_spec_string = 'mpileaks'
+libdwarf_spec_string = 'libdwarf arch=x64-linux'
-# Monkey-patch open to write module files to a StringIO instance
-@contextmanager
-def mock_open(filename, mode):
- if not mode == 'w':
- raise RuntimeError(
- 'test.modules : unexpected opening mode for monkey-patched open')
+@pytest.fixture()
+def stringio_open(monkeypatch):
+ """Overrides the `open` builtin in spack.modules with an implementation
+    that writes to a StringIO instance.
+ """
+ @contextlib.contextmanager
+ def _mock(filename, mode):
+ if not mode == 'w':
+ raise RuntimeError('unexpected opening mode for stringio_open')
- FILE_REGISTRY[filename] = StringIO.StringIO()
+ FILE_REGISTRY[filename] = cStringIO.StringIO()
- try:
- yield FILE_REGISTRY[filename]
- finally:
- handle = FILE_REGISTRY[filename]
- FILE_REGISTRY[filename] = handle.getvalue()
- handle.close()
+ try:
+ yield FILE_REGISTRY[filename]
+ finally:
+ handle = FILE_REGISTRY[filename]
+ FILE_REGISTRY[filename] = handle.getvalue()
+ handle.close()
+ monkeypatch.setattr(spack.modules, 'open', _mock, raising=False)
-configuration_autoload_direct = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'autoload': 'direct'
+
+def get_modulefile_content(factory, spec):
+ """Writes the module file and returns the content as a string.
+
+ :param factory: module file factory
+ :param spec: spec of the module file to be written
+ :return: content of the module file
+ :rtype: str
+ """
+ spec.concretize()
+ generator = factory(spec)
+ generator.write()
+ content = FILE_REGISTRY[generator.file_name].split('\n')
+ generator.remove()
+ return content
+
+
+def test_update_dictionary_extending_list():
+ target = {
+ 'foo': {
+ 'a': 1,
+ 'b': 2,
+ 'd': 4
+ },
+ 'bar': [1, 2, 4],
+ 'baz': 'foobar'
+ }
+ update = {
+ 'foo': {
+ 'c': 3,
+ },
+ 'bar': [3],
+ 'baz': 'foobaz',
+ 'newkey': {
+ 'd': 4
}
}
-}
+ spack.modules.update_dictionary_extending_lists(target, update)
+ assert len(target) == 4
+ assert len(target['foo']) == 4
+ assert len(target['bar']) == 4
+ assert target['baz'] == 'foobaz'
+
+
+def test_inspect_path():
+ env = spack.modules.inspect_path('/usr')
+ names = [item.name for item in env]
+ assert 'PATH' in names
+ assert 'LIBRARY_PATH' in names
+ assert 'LD_LIBRARY_PATH' in names
+ assert 'CPATH' in names
+
-configuration_autoload_all = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'autoload': 'all'
+@pytest.fixture()
+def tcl_factory(tmpdir, monkeypatch):
+ """Returns a factory that writes non-hierarchical TCL module files."""
+ factory = spack.modules.TclModule
+ monkeypatch.setattr(factory, 'path', str(tmpdir))
+ monkeypatch.setattr(spack.modules, 'module_types', {factory.name: factory})
+ return factory
+
+
+@pytest.fixture()
+def lmod_factory(tmpdir, monkeypatch):
+ """Returns a factory that writes hierarchical LUA module files."""
+ factory = spack.modules.LmodModule
+ monkeypatch.setattr(factory, 'path', str(tmpdir))
+ monkeypatch.setattr(spack.modules, 'module_types', {factory.name: factory})
+ return factory
+
+
+@pytest.fixture()
+def dotkit_factory(tmpdir, monkeypatch):
+ """Returns a factory that writes DotKit module files."""
+ factory = spack.modules.Dotkit
+ monkeypatch.setattr(factory, 'path', str(tmpdir))
+ monkeypatch.setattr(spack.modules, 'module_types', {factory.name: factory})
+ return factory
+
+
+@pytest.mark.usefixtures('config', 'builtin_mock', 'stringio_open')
+class TestTcl(object):
+
+ configuration_autoload_direct = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'autoload': 'direct'
+ }
}
}
-}
-configuration_alter_environment = {
- 'enable': ['tcl'],
- 'tcl': {
- 'all': {
- 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
- },
- '=x86-linux': {
- 'environment': {'set': {'FOO': 'foo'},
- 'unset': ['BAR']}
+ configuration_autoload_all = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'autoload': 'all'
+ }
}
}
-}
-configuration_blacklist = {
- 'enable': ['tcl'],
- 'tcl': {
- 'blacklist': ['callpath'],
- 'all': {
- 'autoload': 'direct'
+ configuration_prerequisites_direct = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'prerequisites': 'direct'
+ }
}
}
-}
-configuration_conflicts = {
- 'enable': ['tcl'],
- 'tcl': {
- 'naming_scheme': '{name}/{version}-{compiler.name}',
- 'all': {
- 'conflict': ['{name}', 'intel/14.0.1']
+ configuration_prerequisites_all = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'prerequisites': 'all'
+ }
}
}
-}
+ configuration_alter_environment = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'all': {
+ 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']},
+ 'environment': {
+ 'set': {'${PACKAGE}_ROOT': '${PREFIX}'}
+ }
+ },
+ 'platform=test target=x86_64': {
+ 'environment': {
+ 'set': {'FOO': 'foo'},
+ 'unset': ['BAR']
+ }
+ },
+ 'platform=test target=x86_32': {
+ 'load': ['foo/bar']
+ }
+ }
+ }
+
+ configuration_blacklist = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'whitelist': ['zmpi'],
+ 'blacklist': ['callpath', 'mpi'],
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+ }
+
+ configuration_conflicts = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'naming_scheme': '${PACKAGE}/${VERSION}-${COMPILERNAME}',
+ 'all': {
+ 'conflict': ['${PACKAGE}', 'intel/14.0.1']
+ }
+ }
+ }
+
+ configuration_wrong_conflicts = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'naming_scheme': '${PACKAGE}/${VERSION}-${COMPILERNAME}',
+ 'all': {
+ 'conflict': ['${PACKAGE}/${COMPILERNAME}']
+ }
+ }
+ }
+
+ configuration_suffix = {
+ 'enable': ['tcl'],
+ 'tcl': {
+ 'mpileaks': {
+ 'suffixes': {
+ '+debug': 'foo',
+ '~debug': 'bar'
+ }
+ }
+ }
+ }
+
+ def test_simple_case(self, tcl_factory):
+ spack.modules._module_config = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpich_spec_string)
+ content = get_modulefile_content(tcl_factory, spec)
+ assert 'module-whatis "mpich @3.0.4"' in content
+ with pytest.raises(TypeError):
+ spack.modules.dependencies(spec, 'non-existing-tag')
+
+ def test_autoload(self, tcl_factory):
+ spack.modules._module_config = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'is-loaded' in x]) == 2
+ assert len([x for x in content if 'module load ' in x]) == 2
+
+ spack.modules._module_config = self.configuration_autoload_all
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'is-loaded' in x]) == 5
+ assert len([x for x in content if 'module load ' in x]) == 5
+
+ # dtbuild1 has
+ # - 1 ('run',) dependency
+ # - 1 ('build','link') dependency
+ # - 1 ('build',) dependency
+ # Just make sure the 'build' dependency is not there
+ spack.modules._module_config = self.configuration_autoload_direct
+ spec = spack.spec.Spec('dtbuild1')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'is-loaded' in x]) == 2
+ assert len([x for x in content if 'module load ' in x]) == 2
+
+ # dtbuild1 has
+ # - 1 ('run',) dependency
+ # - 1 ('build','link') dependency
+ # - 1 ('build',) dependency
+ # Just make sure the 'build' dependency is not there
+ spack.modules._module_config = self.configuration_autoload_all
+ spec = spack.spec.Spec('dtbuild1')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'is-loaded' in x]) == 2
+ assert len([x for x in content if 'module load ' in x]) == 2
-class TclTests(MockPackagesTest):
- def setUp(self):
- super(TclTests, self).setUp()
- self.configuration_obj = spack.modules.CONFIGURATION
- spack.modules.open = mock_open
- # Make sure that a non-mocked configuration will trigger an error
- spack.modules.CONFIGURATION = None
+ def test_prerequisites(self, tcl_factory):
+ spack.modules._module_config = self.configuration_prerequisites_direct
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'prereq' in x]) == 2
- def tearDown(self):
- del spack.modules.open
- spack.modules.CONFIGURATION = self.configuration_obj
- super(TclTests, self).tearDown()
+ spack.modules._module_config = self.configuration_prerequisites_all
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'prereq' in x]) == 5
- def get_modulefile_content(self, spec):
+ def test_alter_environment(self, tcl_factory):
+ spack.modules._module_config = self.configuration_alter_environment
+ spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content
+ if x.startswith('prepend-path CMAKE_PREFIX_PATH')
+ ]) == 0
+ assert len([x for x in content if 'setenv FOO "foo"' in x]) == 1
+ assert len([x for x in content if 'unsetenv BAR' in x]) == 1
+ assert len([x for x in content if 'setenv MPILEAKS_ROOT' in x]) == 1
+
+ spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len(
+ [x for x in content if x.startswith('prepend-path CMAKE_PREFIX_PATH')] # NOQA: ignore=E501
+ ) == 0
+ assert len([x for x in content if 'setenv FOO "foo"' in x]) == 0
+ assert len([x for x in content if 'unsetenv BAR' in x]) == 0
+ assert len([x for x in content if 'is-loaded foo/bar' in x]) == 1
+ assert len([x for x in content if 'module load foo/bar' in x]) == 1
+ assert len([x for x in content if 'setenv LIBDWARF_ROOT' in x]) == 1
+
+ def test_blacklist(self, tcl_factory):
+ spack.modules._module_config = self.configuration_blacklist
+ spec = spack.spec.Spec('mpileaks ^zmpi')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'is-loaded' in x]) == 1
+ assert len([x for x in content if 'module load ' in x]) == 1
+ spec = spack.spec.Spec('callpath arch=x86-linux')
+ # Returns a StringIO instead of a string as no module file was written
+ with pytest.raises(AttributeError):
+ get_modulefile_content(tcl_factory, spec)
+ spec = spack.spec.Spec('zmpi arch=x86-linux')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if 'is-loaded' in x]) == 1
+ assert len([x for x in content if 'module load ' in x]) == 1
+
+ def test_conflicts(self, tcl_factory):
+ spack.modules._module_config = self.configuration_conflicts
+ spec = spack.spec.Spec('mpileaks')
+ content = get_modulefile_content(tcl_factory, spec)
+ assert len([x for x in content if x.startswith('conflict')]) == 2
+ assert len([x for x in content if x == 'conflict mpileaks']) == 1
+ assert len([x for x in content if x == 'conflict intel/14.0.1']) == 1
+
+ spack.modules._module_config = self.configuration_wrong_conflicts
+ with pytest.raises(SystemExit):
+ get_modulefile_content(tcl_factory, spec)
+
+ def test_suffixes(self, tcl_factory):
+ spack.modules._module_config = self.configuration_suffix
+ spec = spack.spec.Spec('mpileaks+debug arch=x86-linux')
+ spec.concretize()
+ generator = tcl_factory(spec)
+ assert 'foo' in generator.use_name
+
+ spec = spack.spec.Spec('mpileaks~debug arch=x86-linux')
+ spec.concretize()
+ generator = tcl_factory(spec)
+ assert 'bar' in generator.use_name
+
+
+@pytest.mark.usefixtures('config', 'builtin_mock', 'stringio_open')
+class TestLmod(object):
+ configuration_autoload_direct = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+ }
+
+ configuration_autoload_all = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'all': {
+ 'autoload': 'all'
+ }
+ }
+ }
+
+ configuration_no_hash = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'hash_length': 0
+ }
+ }
+
+ configuration_alter_environment = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'all': {
+ 'filter': {'environment_blacklist': ['CMAKE_PREFIX_PATH']}
+ },
+ 'platform=test target=x86_64': {
+ 'environment': {
+ 'set': {'FOO': 'foo'},
+ 'unset': ['BAR']
+ }
+ },
+ 'platform=test target=x86_32': {
+ 'load': ['foo/bar']
+ }
+ }
+ }
+
+ configuration_blacklist = {
+ 'enable': ['lmod'],
+ 'lmod': {
+ 'blacklist': ['callpath'],
+ 'all': {
+ 'autoload': 'direct'
+ }
+ }
+ }
+
+ def test_simple_case(self, lmod_factory):
+ spack.modules._module_config = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpich_spec_string)
+ content = get_modulefile_content(lmod_factory, spec)
+ assert '-- -*- lua -*-' in content
+ assert 'whatis([[Name : mpich]])' in content
+ assert 'whatis([[Version : 3.0.4]])' in content
+
+ def test_autoload(self, lmod_factory):
+ spack.modules._module_config = self.configuration_autoload_direct
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = get_modulefile_content(lmod_factory, spec)
+ assert len([x for x in content if 'if not isloaded(' in x]) == 2
+ assert len([x for x in content if 'load(' in x]) == 2
+
+ spack.modules._module_config = self.configuration_autoload_all
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = get_modulefile_content(lmod_factory, spec)
+ assert len([x for x in content if 'if not isloaded(' in x]) == 5
+ assert len([x for x in content if 'load(' in x]) == 5
+
+ def test_alter_environment(self, lmod_factory):
+ spack.modules._module_config = self.configuration_alter_environment
+ spec = spack.spec.Spec('mpileaks platform=test target=x86_64')
+ content = get_modulefile_content(lmod_factory, spec)
+ assert len(
+ [x for x in content if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')] # NOQA: ignore=E501
+ ) == 0
+ assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 1
+ assert len([x for x in content if 'unsetenv("BAR")' in x]) == 1
+
+ spec = spack.spec.Spec('libdwarf %clang platform=test target=x86_32')
+ content = get_modulefile_content(lmod_factory, spec)
+ assert len(
+            [x for x in content if x.startswith('prepend_path("CMAKE_PREFIX_PATH"')]  # NOQA: ignore=E501
+ ) == 0
+ assert len([x for x in content if 'setenv("FOO", "foo")' in x]) == 0
+ assert len([x for x in content if 'unsetenv("BAR")' in x]) == 0
+
+ def test_blacklist(self, lmod_factory):
+ spack.modules._module_config = self.configuration_blacklist
+ spec = spack.spec.Spec(mpileaks_spec_string)
+ content = get_modulefile_content(lmod_factory, spec)
+ assert len([x for x in content if 'if not isloaded(' in x]) == 1
+ assert len([x for x in content if 'load(' in x]) == 1
+
+ def test_no_hash(self, lmod_factory):
+ # Make sure that virtual providers (in the hierarchy) always
+ # include a hash. Make sure that the module file for the spec
+ # does not include a hash if hash_length is 0.
+ spack.modules._module_config = self.configuration_no_hash
+ spec = spack.spec.Spec(mpileaks_spec_string)
spec.concretize()
- generator = spack.modules.TclModule(spec)
- generator.write()
- content = FILE_REGISTRY[generator.file_name].split('\n')
- return content
-
- def test_simple_case(self):
- spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpich@3.0.4=x86-linux')
- content = self.get_modulefile_content(spec)
- self.assertTrue('module-whatis "mpich @3.0.4"' in content)
-
- def test_autoload(self):
- spack.modules.CONFIGURATION = configuration_autoload_direct
- spec = spack.spec.Spec('mpileaks=x86-linux')
- content = self.get_modulefile_content(spec)
- self.assertEqual(len([x for x in content if 'is-loaded' in x]), 2)
- self.assertEqual(len([x for x in content if 'module load ' in x]), 2)
-
- spack.modules.CONFIGURATION = configuration_autoload_all
- spec = spack.spec.Spec('mpileaks=x86-linux')
- content = self.get_modulefile_content(spec)
- self.assertEqual(len([x for x in content if 'is-loaded' in x]), 5)
- self.assertEqual(len([x for x in content if 'module load ' in x]), 5)
-
- def test_alter_environment(self):
- spack.modules.CONFIGURATION = configuration_alter_environment
- spec = spack.spec.Spec('mpileaks=x86-linux')
- content = self.get_modulefile_content(spec)
- self.assertEqual(
- len([x
- for x in content
- if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
- self.assertEqual(
- len([x for x in content if 'setenv FOO "foo"' in x]), 1)
- self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 1)
-
- spec = spack.spec.Spec('libdwarf=x64-linux')
- content = self.get_modulefile_content(spec)
- self.assertEqual(
- len([x
- for x in content
- if x.startswith('prepend-path CMAKE_PREFIX_PATH')]), 0)
- self.assertEqual(
- len([x for x in content if 'setenv FOO "foo"' in x]), 0)
- self.assertEqual(len([x for x in content if 'unsetenv BAR' in x]), 0)
-
- def test_blacklist(self):
- spack.modules.CONFIGURATION = configuration_blacklist
- spec = spack.spec.Spec('mpileaks=x86-linux')
- content = self.get_modulefile_content(spec)
- self.assertEqual(len([x for x in content if 'is-loaded' in x]), 1)
- self.assertEqual(len([x for x in content if 'module load ' in x]), 1)
-
- def test_conflicts(self):
- spack.modules.CONFIGURATION = configuration_conflicts
- spec = spack.spec.Spec('mpileaks=x86-linux')
- content = self.get_modulefile_content(spec)
- self.assertEqual(
- len([x for x in content if x.startswith('conflict')]), 2)
- self.assertEqual(
- len([x for x in content if x == 'conflict mpileaks']), 1)
- self.assertEqual(
- len([x for x in content if x == 'conflict intel/14.0.1']), 1)
+ module = lmod_factory(spec)
+ path = module.file_name
+ mpi_spec = spec['mpi']
+        mpi_element = "{0}/{1}-{2}/".format(
+            mpi_spec.name, mpi_spec.version, mpi_spec.dag_hash(length=7)
+        )
+        assert mpi_element in path
+ mpileaks_spec = spec
+ mpileaks_element = "{0}/{1}.lua".format(
+ mpileaks_spec.name, mpileaks_spec.version)
+ assert path.endswith(mpileaks_element)
+
+
+@pytest.mark.usefixtures('config', 'builtin_mock', 'stringio_open')
+class TestDotkit(object):
+ configuration_dotkit = {
+ 'enable': ['dotkit'],
+ 'dotkit': {
+ 'all': {
+ 'prerequisites': 'direct'
+ }
+ }
+ }
+
+ def test_dotkit(self, dotkit_factory):
+ spack.modules._module_config = self.configuration_dotkit
+ spec = spack.spec.Spec('mpileaks arch=x86-linux')
+ content = get_modulefile_content(dotkit_factory, spec)
+ assert '#c spack' in content
+ assert '#d mpileaks @2.3' in content
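
The stringio_open fixture above is an instance of a reusable trick: monkeypatch a module's open so generated files land in an in-memory registry instead of on disk. A generic, Python-3-flavoured sketch of the same idea (the diff itself targets Python 2 and cStringIO; FILE_CONTENTS and in_memory_open are illustrative names) is:

    import contextlib
    import io

    FILE_CONTENTS = {}

    @contextlib.contextmanager
    def in_memory_open(filename, mode):
        """Capture writes keyed by file name instead of touching the filesystem."""
        if mode != 'w':
            raise RuntimeError('only write mode is captured in this sketch')
        buf = io.StringIO()
        try:
            yield buf
        finally:
            FILE_CONTENTS[filename] = buf.getvalue()   # keep the text for assertions
            buf.close()

    # In a test:  monkeypatch.setattr(some_module, 'open', in_memory_open, raising=False)
    # Code in some_module doing `with open(path, 'w') as f: f.write(...)` then becomes
    # observable through FILE_CONTENTS[path].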
diff --git a/lib/spack/spack/test/multimethod.py b/lib/spack/spack/test/multimethod.py
index f653ca3477..90948f010c 100644
--- a/lib/spack/spack/test/multimethod.py
+++ b/lib/spack/spack/test/multimethod.py
@@ -22,105 +22,99 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""
-Test for multi_method dispatch.
-"""
-
+"""Test for multi_method dispatch."""
import spack
+import pytest
from spack.multimethod import *
-from spack.test.mock_packages_test import *
from spack.version import *
-class MultiMethodTest(MockPackagesTest):
-
- def test_no_version_match(self):
- pkg = spack.repo.get('multimethod@2.0')
- self.assertRaises(NoSuchMethodError, pkg.no_version_2)
-
-
- def test_one_version_match(self):
- pkg = spack.repo.get('multimethod@1.0')
- self.assertEqual(pkg.no_version_2(), 1)
-
- pkg = spack.repo.get('multimethod@3.0')
- self.assertEqual(pkg.no_version_2(), 3)
+def test_no_version_match(builtin_mock):
+ pkg = spack.repo.get('multimethod@2.0')
+ with pytest.raises(NoSuchMethodError):
+ pkg.no_version_2()
- pkg = spack.repo.get('multimethod@4.0')
- self.assertEqual(pkg.no_version_2(), 4)
+def test_one_version_match(builtin_mock):
+ pkg = spack.repo.get('multimethod@1.0')
+ assert pkg.no_version_2() == 1
- def test_version_overlap(self):
- pkg = spack.repo.get('multimethod@2.0')
- self.assertEqual(pkg.version_overlap(), 1)
+ pkg = spack.repo.get('multimethod@3.0')
+ assert pkg.no_version_2() == 3
- pkg = spack.repo.get('multimethod@5.0')
- self.assertEqual(pkg.version_overlap(), 2)
+ pkg = spack.repo.get('multimethod@4.0')
+ assert pkg.no_version_2() == 4
- def test_mpi_version(self):
- pkg = spack.repo.get('multimethod^mpich@3.0.4')
- self.assertEqual(pkg.mpi_version(), 3)
+def test_version_overlap(builtin_mock):
+ pkg = spack.repo.get('multimethod@2.0')
+ assert pkg.version_overlap() == 1
- pkg = spack.repo.get('multimethod^mpich2@1.2')
- self.assertEqual(pkg.mpi_version(), 2)
+ pkg = spack.repo.get('multimethod@5.0')
+ assert pkg.version_overlap() == 2
- pkg = spack.repo.get('multimethod^mpich@1.0')
- self.assertEqual(pkg.mpi_version(), 1)
+def test_mpi_version(builtin_mock):
+ pkg = spack.repo.get('multimethod^mpich@3.0.4')
+ assert pkg.mpi_version() == 3
- def test_undefined_mpi_version(self):
- pkg = spack.repo.get('multimethod^mpich@0.4')
- self.assertEqual(pkg.mpi_version(), 1)
+ pkg = spack.repo.get('multimethod^mpich2@1.2')
+ assert pkg.mpi_version() == 2
- pkg = spack.repo.get('multimethod^mpich@1.4')
- self.assertEqual(pkg.mpi_version(), 1)
+ pkg = spack.repo.get('multimethod^mpich@1.0')
+ assert pkg.mpi_version() == 1
- def test_default_works(self):
- pkg = spack.repo.get('multimethod%gcc')
- self.assertEqual(pkg.has_a_default(), 'gcc')
+def test_undefined_mpi_version(builtin_mock):
+ pkg = spack.repo.get('multimethod^mpich@0.4')
+ assert pkg.mpi_version() == 1
- pkg = spack.repo.get('multimethod%intel')
- self.assertEqual(pkg.has_a_default(), 'intel')
+ pkg = spack.repo.get('multimethod^mpich@1.4')
+ assert pkg.mpi_version() == 1
- pkg = spack.repo.get('multimethod%pgi')
- self.assertEqual(pkg.has_a_default(), 'default')
+def test_default_works(builtin_mock):
+ pkg = spack.repo.get('multimethod%gcc')
+ assert pkg.has_a_default() == 'gcc'
- def test_architecture_match(self):
- pkg = spack.repo.get('multimethod=x86_64')
- self.assertEqual(pkg.different_by_architecture(), 'x86_64')
+ pkg = spack.repo.get('multimethod%intel')
+ assert pkg.has_a_default() == 'intel'
- pkg = spack.repo.get('multimethod=ppc64')
- self.assertEqual(pkg.different_by_architecture(), 'ppc64')
+ pkg = spack.repo.get('multimethod%pgi')
+ assert pkg.has_a_default() == 'default'
- pkg = spack.repo.get('multimethod=ppc32')
- self.assertEqual(pkg.different_by_architecture(), 'ppc32')
- pkg = spack.repo.get('multimethod=arm64')
- self.assertEqual(pkg.different_by_architecture(), 'arm64')
+def test_target_match(builtin_mock):
+ platform = spack.architecture.platform()
+ targets = platform.targets.values()
+ for target in targets[:-1]:
+ pkg = spack.repo.get('multimethod target=' + target.name)
+ assert pkg.different_by_target() == target.name
- pkg = spack.repo.get('multimethod=macos')
- self.assertRaises(NoSuchMethodError, pkg.different_by_architecture)
+ pkg = spack.repo.get('multimethod target=' + targets[-1].name)
+ if len(targets) == 1:
+ assert pkg.different_by_target() == targets[-1].name
+ else:
+ with pytest.raises(NoSuchMethodError):
+ pkg.different_by_target()
- def test_dependency_match(self):
- pkg = spack.repo.get('multimethod^zmpi')
- self.assertEqual(pkg.different_by_dep(), 'zmpi')
+def test_dependency_match(builtin_mock):
+ pkg = spack.repo.get('multimethod^zmpi')
+ assert pkg.different_by_dep() == 'zmpi'
- pkg = spack.repo.get('multimethod^mpich')
- self.assertEqual(pkg.different_by_dep(), 'mpich')
+ pkg = spack.repo.get('multimethod^mpich')
+ assert pkg.different_by_dep() == 'mpich'
- # If we try to switch on some entirely different dep, it's ambiguous,
- # but should take the first option
- pkg = spack.repo.get('multimethod^foobar')
- self.assertEqual(pkg.different_by_dep(), 'mpich')
+ # If we try to switch on some entirely different dep, it's ambiguous,
+ # but should take the first option
+ pkg = spack.repo.get('multimethod^foobar')
+ assert pkg.different_by_dep() == 'mpich'
- def test_virtual_dep_match(self):
- pkg = spack.repo.get('multimethod^mpich2')
- self.assertEqual(pkg.different_by_virtual_dep(), 2)
+def test_virtual_dep_match(builtin_mock):
+ pkg = spack.repo.get('multimethod^mpich2')
+ assert pkg.different_by_virtual_dep() == 2
- pkg = spack.repo.get('multimethod^mpich@1.0')
- self.assertEqual(pkg.different_by_virtual_dep(), 1)
+ pkg = spack.repo.get('multimethod^mpich@1.0')
+ assert pkg.different_by_virtual_dep() == 1
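
These tests exercise Spack's @when-style multimethod dispatch: several definitions share a method name, the one whose guard matches the receiving package wins, and a NoSuchMethodError is raised when nothing matches. A toy predicate-based version (not Spack's implementation, which matches spec constraints rather than arbitrary predicates) shows the shape:

    class when(object):
        """Toy multimethod decorator: guard each implementation with a predicate."""
        registry = {}   # method name -> list of (predicate, implementation)

        def __init__(self, predicate):
            self.predicate = predicate

        def __call__(self, fn):
            when.registry.setdefault(fn.__name__, []).append((self.predicate, fn))

            def dispatch(instance, *args, **kwargs):
                for predicate, impl in when.registry[fn.__name__]:
                    if predicate(instance):
                        return impl(instance, *args, **kwargs)
                raise NotImplementedError('no matching method')
            return dispatch

    class FakePackage(object):
        version = '2.0'

        @when(lambda pkg: pkg.version.startswith('1'))
        def flags(self):
            return ['-O2']

        @when(lambda pkg: pkg.version.startswith('2'))
        def flags(self):
            return ['-O3']

    # FakePackage().flags() -> ['-O3']; with no matching guard, NotImplementedError.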
diff --git a/lib/spack/spack/test/namespace_trie.py b/lib/spack/spack/test/namespace_trie.py
index b38ecd6179..7927fc8e60 100644
--- a/lib/spack/spack/test/namespace_trie.py
+++ b/lib/spack/spack/test/namespace_trie.py
@@ -32,7 +32,6 @@ class NamespaceTrieTest(unittest.TestCase):
def setUp(self):
self.trie = NamespaceTrie()
-
def test_add_single(self):
self.trie['foo'] = 'bar'
@@ -40,7 +39,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertTrue(self.trie.has_value('foo'))
self.assertEqual(self.trie['foo'], 'bar')
-
def test_add_multiple(self):
self.trie['foo.bar'] = 'baz'
@@ -54,7 +52,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertFalse(self.trie.is_prefix('foo.bar.baz'))
self.assertFalse(self.trie.has_value('foo.bar.baz'))
-
def test_add_three(self):
# add a three-level namespace
self.trie['foo.bar.baz'] = 'quux'
@@ -89,7 +86,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertFalse(self.trie.is_prefix('foo.bar.baz.quux'))
self.assertFalse(self.trie.has_value('foo.bar.baz.quux'))
-
def test_add_none_single(self):
self.trie['foo'] = None
self.assertTrue(self.trie.is_prefix('foo'))
@@ -99,8 +95,6 @@ class NamespaceTrieTest(unittest.TestCase):
self.assertFalse(self.trie.is_prefix('foo.bar'))
self.assertFalse(self.trie.has_value('foo.bar'))
-
-
def test_add_none_multiple(self):
self.trie['foo.bar'] = None
diff --git a/lib/spack/spack/test/optional_deps.py b/lib/spack/spack/test/optional_deps.py
index 90382dfc4a..d2b8c3e3ac 100644
--- a/lib/spack/spack/test/optional_deps.py
+++ b/lib/spack/spack/test/optional_deps.py
@@ -22,86 +22,90 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
+import pytest
from spack.spec import Spec
-from spack.test.mock_packages_test import *
-
-class ConcretizeTest(MockPackagesTest):
-
- def check_normalize(self, spec_string, expected):
- spec = Spec(spec_string)
- spec.normalize()
- self.assertEqual(spec, expected)
- self.assertTrue(spec.eq_dag(expected))
-
-
- def test_normalize_simple_conditionals(self):
- self.check_normalize('optional-dep-test', Spec('optional-dep-test'))
- self.check_normalize('optional-dep-test~a', Spec('optional-dep-test~a'))
-
- self.check_normalize('optional-dep-test+a',
- Spec('optional-dep-test+a', Spec('a')))
-
- self.check_normalize('optional-dep-test@1.1',
- Spec('optional-dep-test@1.1', Spec('b')))
-
- self.check_normalize('optional-dep-test%intel',
- Spec('optional-dep-test%intel', Spec('c')))
-
- self.check_normalize('optional-dep-test%intel@64.1',
- Spec('optional-dep-test%intel@64.1', Spec('c'), Spec('d')))
-
- self.check_normalize('optional-dep-test%intel@64.1.2',
- Spec('optional-dep-test%intel@64.1.2', Spec('c'), Spec('d')))
-
- self.check_normalize('optional-dep-test%clang@35',
- Spec('optional-dep-test%clang@35', Spec('e')))
-
-
- def test_multiple_conditionals(self):
- self.check_normalize('optional-dep-test+a@1.1',
- Spec('optional-dep-test+a@1.1', Spec('a'), Spec('b')))
-
- self.check_normalize('optional-dep-test+a%intel',
- Spec('optional-dep-test+a%intel', Spec('a'), Spec('c')))
-
- self.check_normalize('optional-dep-test@1.1%intel',
- Spec('optional-dep-test@1.1%intel', Spec('b'), Spec('c')))
-
- self.check_normalize('optional-dep-test@1.1%intel@64.1.2+a',
- Spec('optional-dep-test@1.1%intel@64.1.2+a',
- Spec('b'), Spec('a'), Spec('c'), Spec('d')))
-
- self.check_normalize('optional-dep-test@1.1%clang@36.5+a',
- Spec('optional-dep-test@1.1%clang@36.5+a',
- Spec('b'), Spec('a'), Spec('e')))
-
-
- def test_chained_mpi(self):
- self.check_normalize('optional-dep-test-2+mpi',
- Spec('optional-dep-test-2+mpi',
- Spec('optional-dep-test+mpi',
- Spec('mpi'))))
-
-
- def test_default_variant(self):
- spec = Spec('optional-dep-test-3')
- spec.concretize()
- self.assertTrue('a' in spec)
-
- spec = Spec('optional-dep-test-3~var')
- spec.concretize()
- self.assertTrue('a' in spec)
-
- spec = Spec('optional-dep-test-3+var')
- spec.concretize()
- self.assertTrue('b' in spec)
- def test_transitive_chain(self):
+@pytest.fixture(
+ params=[
+ # Normalize simple conditionals
+ ('optional-dep-test', Spec('optional-dep-test')),
+ ('optional-dep-test~a', Spec('optional-dep-test~a')),
+ ('optional-dep-test+a', Spec('optional-dep-test+a', Spec('a'))),
+ ('optional-dep-test a=true', Spec(
+ 'optional-dep-test a=true', Spec('a')
+ )),
+ ('optional-dep-test a=true', Spec('optional-dep-test+a', Spec('a'))),
+ ('optional-dep-test@1.1', Spec('optional-dep-test@1.1', Spec('b'))),
+ ('optional-dep-test%intel', Spec(
+ 'optional-dep-test%intel', Spec('c')
+ )),
+ ('optional-dep-test%intel@64.1', Spec(
+ 'optional-dep-test%intel@64.1', Spec('c'), Spec('d')
+ )),
+ ('optional-dep-test%intel@64.1.2', Spec(
+ 'optional-dep-test%intel@64.1.2', Spec('c'), Spec('d')
+ )),
+ ('optional-dep-test%clang@35', Spec(
+ 'optional-dep-test%clang@35', Spec('e')
+ )),
+ # Normalize multiple conditionals
+ ('optional-dep-test+a@1.1', Spec(
+ 'optional-dep-test+a@1.1', Spec('a'), Spec('b')
+ )),
+ ('optional-dep-test+a%intel', Spec(
+ 'optional-dep-test+a%intel', Spec('a'), Spec('c')
+ )),
+ ('optional-dep-test@1.1%intel', Spec(
+ 'optional-dep-test@1.1%intel', Spec('b'), Spec('c')
+ )),
+ ('optional-dep-test@1.1%intel@64.1.2+a', Spec(
+ 'optional-dep-test@1.1%intel@64.1.2+a',
+ Spec('b'),
+ Spec('a'),
+ Spec('c'),
+ Spec('d')
+ )),
+ ('optional-dep-test@1.1%clang@36.5+a', Spec(
+ 'optional-dep-test@1.1%clang@36.5+a',
+ Spec('b'),
+ Spec('a'),
+ Spec('e')
+ )),
+ # Chained MPI
+ ('optional-dep-test-2+mpi', Spec(
+ 'optional-dep-test-2+mpi',
+ Spec('optional-dep-test+mpi', Spec('mpi'))
+ )),
# Each of these dependencies comes from a conditional
# dependency on another. This requires iterating to evaluate
# the whole chain.
- self.check_normalize(
- 'optional-dep-test+f',
- Spec('optional-dep-test+f', Spec('f'), Spec('g'), Spec('mpi')))
+ ('optional-dep-test+f', Spec(
+ 'optional-dep-test+f', Spec('f'), Spec('g'), Spec('mpi')
+ ))
+ ]
+)
+def spec_and_expected(request):
+ """Parameters for te normalization test."""
+ return request.param
+
+
+def test_normalize(spec_and_expected, config, builtin_mock):
+ spec, expected = spec_and_expected
+ spec = Spec(spec)
+ spec.normalize()
+ assert spec.eq_dag(expected, deptypes=False)
+
+
+def test_default_variant(config, builtin_mock):
+ spec = Spec('optional-dep-test-3')
+ spec.concretize()
+ assert 'a' in spec
+
+ spec = Spec('optional-dep-test-3~var')
+ spec.concretize()
+ assert 'a' in spec
+
+ spec = Spec('optional-dep-test-3+var')
+ spec.concretize()
+ assert 'b' in spec
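
The conversion above folds a long list of near-identical check_normalize calls into a single parametrized fixture driving one test_normalize function. For reference, a tiny self-contained sketch of that pytest idiom; the names case and test_square are made up for illustration:

    import pytest

    @pytest.fixture(params=[(2, 4), (3, 9), (4, 16)])
    def case(request):
        """Each (input, expected) pair becomes its own test instance."""
        return request.param

    def test_square(case):
        value, expected = case
        assert value * value == expected   # runs once per parameter tuple
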
diff --git a/lib/spack/spack/test/package_sanity.py b/lib/spack/spack/test/package_sanity.py
index 9198986f5d..c75d7cdcc7 100644
--- a/lib/spack/spack/test/package_sanity.py
+++ b/lib/spack/spack/test/package_sanity.py
@@ -26,6 +26,7 @@
This test does sanity checks on Spack's builtin package database.
"""
import unittest
+import re
import spack
from spack.repository import RepoPath
@@ -38,12 +39,10 @@ class PackageSanityTest(unittest.TestCase):
for name in spack.repo.all_package_names():
spack.repo.get(name)
-
def test_get_all_packages(self):
"""Get all packages once and make sure that works."""
self.check_db()
-
def test_get_all_mock_packages(self):
"""Get the mock packages once each too."""
db = RepoPath(spack.mock_packages_path)
@@ -51,7 +50,6 @@ class PackageSanityTest(unittest.TestCase):
self.check_db()
spack.repo.swap(db)
-
def test_url_versions(self):
"""Check URLs for regular packages, if they are explicitly defined."""
for pkg in spack.repo.all_packages():
@@ -60,3 +58,13 @@ class PackageSanityTest(unittest.TestCase):
# If there is a url for the version check it.
v_url = pkg.url_for_version(v)
self.assertEqual(vattrs['url'], v_url)
+
+ def test_all_versions_are_lowercase(self):
+ """Spack package names must be lowercase, and use `-` instead of `_`.
+ """
+ errors = []
+ for name in spack.repo.all_package_names():
+ if re.search(r'[_A-Z]', name):
+ errors.append(name)
+
+ self.assertEqual([], errors)
diff --git a/lib/spack/spack/test/packages.py b/lib/spack/spack/test/packages.py
index bea42bb33a..6ae8a33a24 100644
--- a/lib/spack/spack/test/packages.py
+++ b/lib/spack/spack/test/packages.py
@@ -22,83 +22,109 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
import spack
from llnl.util.filesystem import join_path
from spack.repository import Repo
-from spack.test.mock_packages_test import *
from spack.util.naming import mod_to_class
+from spack.spec import *
+
+
+def test_load_package(builtin_mock):
+ spack.repo.get('mpich')
-class PackagesTest(MockPackagesTest):
+def test_package_name(builtin_mock):
+ pkg = spack.repo.get('mpich')
+ assert pkg.name == 'mpich'
- def test_load_package(self):
- pkg = spack.repo.get('mpich')
+def test_package_filename(builtin_mock):
+ repo = Repo(spack.mock_packages_path)
+ filename = repo.filename_for_package_name('mpich')
+ assert filename == join_path(
+ spack.mock_packages_path,
+ 'packages',
+ 'mpich',
+ 'package.py'
+ )
- def test_package_name(self):
- pkg = spack.repo.get('mpich')
- self.assertEqual(pkg.name, 'mpich')
+def test_nonexisting_package_filename():
+ repo = Repo(spack.mock_packages_path)
+ filename = repo.filename_for_package_name('some-nonexisting-package')
+ assert filename == join_path(
+ spack.mock_packages_path,
+ 'packages',
+ 'some-nonexisting-package',
+ 'package.py'
+ )
- def test_package_filename(self):
- repo = Repo(spack.mock_packages_path)
- filename = repo.filename_for_package_name('mpich')
- self.assertEqual(filename,
- join_path(spack.mock_packages_path, 'packages', 'mpich', 'package.py'))
+def test_package_class_names():
+ assert 'Mpich' == mod_to_class('mpich')
+ assert 'PmgrCollective' == mod_to_class('pmgr_collective')
+ assert 'PmgrCollective' == mod_to_class('pmgr-collective')
+ assert 'Pmgrcollective' == mod_to_class('PmgrCollective')
+ assert '_3db' == mod_to_class('3db')
- def test_package_name(self):
- pkg = spack.repo.get('mpich')
- self.assertEqual(pkg.name, 'mpich')
+# The tests below target direct imports of Spack packages from the
+# spack.pkg namespace
+def test_import_package(builtin_mock):
+ import spack.pkg.builtin.mock.mpich # noqa
- def test_nonexisting_package_filename(self):
- repo = Repo(spack.mock_packages_path)
- filename = repo.filename_for_package_name('some-nonexisting-package')
- self.assertEqual(
- filename,
- join_path(spack.mock_packages_path, 'packages', 'some-nonexisting-package', 'package.py'))
+def test_import_package_as(builtin_mock):
+ import spack.pkg.builtin.mock.mpich as mp # noqa
- def test_package_class_names(self):
- self.assertEqual('Mpich', mod_to_class('mpich'))
- self.assertEqual('PmgrCollective', mod_to_class('pmgr_collective'))
- self.assertEqual('PmgrCollective', mod_to_class('pmgr-collective'))
- self.assertEqual('Pmgrcollective', mod_to_class('PmgrCollective'))
- self.assertEqual('_3db', mod_to_class('3db'))
+ import spack.pkg.builtin.mock # noqa
+ import spack.pkg.builtin.mock as m # noqa
+ from spack.pkg.builtin import mock # noqa
- #
- # Below tests target direct imports of spack packages from the
- # spack.pkg namespace
- #
+def test_inheritance_of_directives(config, builtin_mock):
+ p = spack.repo.get('simple-inheritance')
- def test_import_package(self):
- import spack.pkg.builtin.mock.mpich
+ # Check dictionaries that should have been filled by directives
+ assert len(p.dependencies) == 3
+ assert 'cmake' in p.dependencies
+ assert 'openblas' in p.dependencies
+ assert 'mpi' in p.dependencies
+ assert len(p.provided) == 2
+ # Check that Spec instantiation behaves as we expect
+ s = Spec('simple-inheritance')
+ s.concretize()
+ assert '^cmake' in s
+ assert '^openblas' in s
+ assert '+openblas' in s
+ assert 'mpi' in s
- def test_import_package_as(self):
- import spack.pkg.builtin.mock.mpich as mp
+ s = Spec('simple-inheritance~openblas')
+ s.concretize()
+ assert '^cmake' in s
+ assert '^openblas' not in s
+ assert '~openblas' in s
+ assert 'mpi' in s
- def test_import_class_from_package(self):
- from spack.pkg.builtin.mock.mpich import Mpich
+def test_import_class_from_package(builtin_mock):
+ from spack.pkg.builtin.mock.mpich import Mpich # noqa
- def test_import_module_from_package(self):
- from spack.pkg.builtin.mock import mpich
+def test_import_module_from_package(builtin_mock):
+ from spack.pkg.builtin.mock import mpich # noqa
- def test_import_namespace_container_modules(self):
- import spack.pkg
- import spack.pkg as p
- from spack import pkg
+def test_import_namespace_container_modules(builtin_mock):
+ import spack.pkg # noqa
+ import spack.pkg as p # noqa
+ from spack import pkg # noqa
- import spack.pkg.builtin
- import spack.pkg.builtin as b
- from spack.pkg import builtin
+ import spack.pkg.builtin # noqa
+ import spack.pkg.builtin as b # noqa
+ from spack.pkg import builtin # noqa
- import spack.pkg.builtin.mock
- import spack.pkg.builtin.mock as m
- from spack.pkg.builtin import mock
+ import spack.pkg.builtin.mock # noqa
+ import spack.pkg.builtin.mock as m # noqa
+ from spack.pkg.builtin import mock # noqa
diff --git a/lib/spack/spack/test/pattern.py b/lib/spack/spack/test/pattern.py
index 3419d600b8..0c772a0d2d 100644
--- a/lib/spack/spack/test/pattern.py
+++ b/lib/spack/spack/test/pattern.py
@@ -41,6 +41,7 @@ class CompositeTest(unittest.TestCase):
raise NotImplemented('subtract not implemented')
class One(Base):
+
def add(self):
Base.counter += 1
@@ -48,6 +49,7 @@ class CompositeTest(unittest.TestCase):
Base.counter -= 1
class Two(Base):
+
def add(self):
Base.counter += 2
diff --git a/lib/spack/spack/test/provider_index.py b/lib/spack/spack/test/provider_index.py
new file mode 100644
index 0000000000..a176d0c315
--- /dev/null
+++ b/lib/spack/spack/test/provider_index.py
@@ -0,0 +1,93 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Tests for provider index cache files.
+
+Tests assume that mock packages provide this::
+
+ {'blas': {
+ blas: set([netlib-blas, openblas, openblas-with-lapack])},
+ 'lapack': {lapack: set([netlib-lapack, openblas-with-lapack])},
+ 'mpi': {mpi@:1: set([mpich@:1]),
+ mpi@:2.0: set([mpich2]),
+ mpi@:2.1: set([mpich2@1.1:]),
+ mpi@:2.2: set([mpich2@1.2:]),
+ mpi@:3: set([mpich@3:]),
+ mpi@:10.0: set([zmpi])},
+ 'stuff': {stuff: set([externalvirtual])}}
+"""
+import StringIO
+import spack
+from spack.provider_index import ProviderIndex
+from spack.spec import Spec
+
+
+def test_yaml_round_trip(builtin_mock):
+ p = ProviderIndex(spack.repo.all_package_names())
+
+ ostream = StringIO.StringIO()
+ p.to_yaml(ostream)
+
+ istream = StringIO.StringIO(ostream.getvalue())
+ q = ProviderIndex.from_yaml(istream)
+
+ assert p == q
+
+
+def test_providers_for_simple(builtin_mock):
+ p = ProviderIndex(spack.repo.all_package_names())
+
+ blas_providers = p.providers_for('blas')
+ assert Spec('netlib-blas') in blas_providers
+ assert Spec('openblas') in blas_providers
+ assert Spec('openblas-with-lapack') in blas_providers
+
+ lapack_providers = p.providers_for('lapack')
+ assert Spec('netlib-lapack') in lapack_providers
+ assert Spec('openblas-with-lapack') in lapack_providers
+
+
+def test_mpi_providers(builtin_mock):
+ p = ProviderIndex(spack.repo.all_package_names())
+
+ mpi_2_providers = p.providers_for('mpi@2')
+ assert Spec('mpich2') in mpi_2_providers
+ assert Spec('mpich@3:') in mpi_2_providers
+
+ mpi_3_providers = p.providers_for('mpi@3')
+ assert Spec('mpich2') not in mpi_3_providers
+ assert Spec('mpich@3:') in mpi_3_providers
+ assert Spec('zmpi') in mpi_3_providers
+
+
+def test_equal(builtin_mock):
+ p = ProviderIndex(spack.repo.all_package_names())
+ q = ProviderIndex(spack.repo.all_package_names())
+ assert p == q
+
+
+def test_copy(builtin_mock):
+ p = ProviderIndex(spack.repo.all_package_names())
+ q = p.copy()
+ assert p == q
diff --git a/lib/spack/spack/test/python_version.py b/lib/spack/spack/test/python_version.py
index 6c09effc56..5af55bdc5f 100644
--- a/lib/spack/spack/test/python_version.py
+++ b/lib/spack/spack/test/python_version.py
@@ -36,7 +36,8 @@ import llnl.util.tty as tty
import pyqver2
import spack
-spack_max_version = (2,6)
+spack_max_version = (2, 6)
+
class PythonVersionTest(unittest.TestCase):
@@ -51,12 +52,10 @@ class PythonVersionTest(unittest.TestCase):
if re.match(r'^[^.#].*\.py$', filename):
yield os.path.join(root, filename)
-
def package_py_files(self):
for name in spack.repo.all_package_names():
yield spack.repo.filename_for_package_name(name)
-
def check_python_versions(self, *files):
# dict version -> filename -> reasons
all_issues = {}
@@ -66,7 +65,7 @@ class PythonVersionTest(unittest.TestCase):
versions = pyqver2.get_versions(pyfile.read())
for ver, reasons in versions.items():
if ver > spack_max_version:
- if not ver in all_issues:
+ if ver not in all_issues:
all_issues[ver] = {}
all_issues[ver][fn] = reasons
@@ -87,7 +86,7 @@ class PythonVersionTest(unittest.TestCase):
tty.error("These files require version %d.%d:" % v)
maxlen = max(len(f) for f, prob in msgs)
- fmt = "%%-%ds%%s" % (maxlen+3)
+ fmt = "%%-%ds%%s" % (maxlen + 3)
print fmt % ('File', 'Reason')
print fmt % ('-' * (maxlen), '-' * 20)
for msg in msgs:
@@ -95,10 +94,8 @@ class PythonVersionTest(unittest.TestCase):
self.assertTrue(len(all_issues) == 0)
-
def test_core_module_compatibility(self):
self.check_python_versions(*self.pyfiles(spack.lib_path))
-
def test_package_module_compatibility(self):
self.check_python_versions(*self.pyfiles(spack.packages_path))
diff --git a/lib/spack/spack/test/sbang.py b/lib/spack/spack/test/sbang.py
index 6aea1a68c7..12abce7b35 100644
--- a/lib/spack/spack/test/sbang.py
+++ b/lib/spack/spack/test/sbang.py
@@ -26,6 +26,7 @@
Test that Spack's shebang filtering works correctly.
"""
import os
+import stat
import unittest
import tempfile
import shutil
@@ -34,12 +35,16 @@ from llnl.util.filesystem import *
from spack.hooks.sbang import filter_shebangs_in_directory
import spack
-short_line = "#!/this/is/short/bin/bash\n"
-long_line = "#!/this/" + ('x' * 200) + "/is/long\n"
-sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root
-last_line = "last!\n"
+short_line = "#!/this/is/short/bin/bash\n"
+long_line = "#!/this/" + ('x' * 200) + "/is/long\n"
+lua_line = "#!/this/" + ('x' * 200) + "/is/lua\n"
+lua_line_patched = "--!/this/" + ('x' * 200) + "/is/lua\n"
+sbang_line = '#!/bin/bash %s/bin/sbang\n' % spack.spack_root
+last_line = "last!\n"
+
class SbangTest(unittest.TestCase):
+
def setUp(self):
self.tempdir = tempfile.mkdtemp()
@@ -59,6 +64,12 @@ class SbangTest(unittest.TestCase):
f.write(long_line)
f.write(last_line)
+ # Lua script with long shebang
+ self.lua_shebang = os.path.join(self.tempdir, 'lua')
+ with open(self.lua_shebang, 'w') as f:
+ f.write(lua_line)
+ f.write(last_line)
+
# Script already using sbang.
self.has_shebang = os.path.join(self.tempdir, 'shebang')
with open(self.has_shebang, 'w') as f:
@@ -66,11 +77,8 @@ class SbangTest(unittest.TestCase):
f.write(long_line)
f.write(last_line)
-
def tearDown(self):
- shutil.rmtree(self.tempdir, ignore_errors=True)
-
-
+ shutil.rmtree(self.tempdir, ignore_errors=True)
def test_shebang_handling(self):
filter_shebangs_in_directory(self.tempdir)
@@ -86,8 +94,25 @@ class SbangTest(unittest.TestCase):
self.assertEqual(f.readline(), long_line)
self.assertEqual(f.readline(), last_line)
+ # Make sure this got patched.
+ with open(self.lua_shebang, 'r') as f:
+ self.assertEqual(f.readline(), sbang_line)
+ self.assertEqual(f.readline(), lua_line_patched)
+ self.assertEqual(f.readline(), last_line)
+
# Make sure this is untouched
with open(self.has_shebang, 'r') as f:
self.assertEqual(f.readline(), sbang_line)
self.assertEqual(f.readline(), long_line)
self.assertEqual(f.readline(), last_line)
+
+ def test_shebang_handles_non_writable_files(self):
+ # make a file non-writable
+ st = os.stat(self.long_shebang)
+ not_writable_mode = st.st_mode & ~stat.S_IWRITE
+ os.chmod(self.long_shebang, not_writable_mode)
+
+ self.test_shebang_handling()
+
+ st = os.stat(self.long_shebang)
+ self.assertEqual(oct(not_writable_mode), oct(st.st_mode))
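
These tests pin down the shebang-patching contract: a script whose #! line exceeds the kernel's length limit gets the sbang line prepended, the original interpreter line is kept (rewritten to a '--!' comment for Lua, which does not treat '#' as a comment), and short or already-patched scripts are left alone. A behavioral sketch of that rule as the assertions describe it; this is not Spack's filter_shebangs_in_directory implementation, and 127 is just the commonly cited Linux limit:

    def patch_shebang(lines, sbang_line, limit=127):
        """Rewrite a script's lines the way the tests above expect."""
        first = lines[0]
        if not first.startswith('#!') or len(first) <= limit:
            return lines                        # short shebang: untouched
        if first == sbang_line:
            return lines                        # already routed through sbang
        if first.rstrip().endswith('lua'):
            first = '--!' + first[2:]           # keep old line as a Lua comment
        return [sbang_line, first] + lines[1:]
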
diff --git a/lib/spack/spack/test/yaml.py b/lib/spack/spack/test/spack_yaml.py
index f1b83e7b71..fbbb7b8e60 100644
--- a/lib/spack/spack/test/yaml.py
+++ b/lib/spack/spack/test/spack_yaml.py
@@ -45,26 +45,25 @@ config_file:
"""
test_data = {
- 'config_file' : syaml.syaml_dict([
+ 'config_file': syaml.syaml_dict([
('x86_64', syaml.syaml_dict([
('foo', '/path/to/foo'),
('bar', '/path/to/bar'),
- ('baz', '/path/to/baz' )])),
- ('some_list', [ 'item 1', 'item 2', 'item 3' ]),
- ('another_list', [ 1, 2, 3 ]),
+ ('baz', '/path/to/baz')])),
+ ('some_list', ['item 1', 'item 2', 'item 3']),
+ ('another_list', [1, 2, 3]),
('some_key', 'some_string')
])}
-class YamlTest(unittest.TestCase):
+
+class SpackYamlTest(unittest.TestCase):
def setUp(self):
self.data = syaml.load(test_file)
-
def test_parse(self):
self.assertEqual(test_data, self.data)
-
def test_dict_order(self):
self.assertEqual(
['x86_64', 'some_list', 'another_list', 'some_key'],
@@ -74,7 +73,6 @@ class YamlTest(unittest.TestCase):
['foo', 'bar', 'baz'],
self.data['config_file']['x86_64'].keys())
-
def test_line_numbers(self):
def check(obj, start_line, end_line):
self.assertEqual(obj._start_mark.line, start_line)
@@ -92,3 +90,19 @@ class YamlTest(unittest.TestCase):
check(self.data['config_file']['some_list'][2], 8, 8)
check(self.data['config_file']['another_list'], 10, 10)
check(self.data['config_file']['some_key'], 11, 11)
+
+ def test_yaml_aliases(self):
+ aliased_list_1 = ['foo']
+ aliased_list_2 = []
+ dict_with_aliases = {
+ 'a': aliased_list_1,
+ 'b': aliased_list_1,
+ 'c': aliased_list_1,
+ 'd': aliased_list_2,
+ 'e': aliased_list_2,
+ 'f': aliased_list_2,
+ }
+ string = syaml.dump(dict_with_aliases)
+
+ # ensure no YAML aliases appear in syaml dumps.
+ self.assertFalse('*id' in string)
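
test_yaml_aliases checks that dumping a structure which reuses the same list object several times does not produce YAML anchors and aliases (&id001 / *id001), which stock PyYAML emits by default. As background on the behavior being tested, the usual PyYAML way to turn aliases off looks roughly like this (NoAliasDumper is an illustrative name, unrelated to syaml's internals):

    import yaml

    class NoAliasDumper(yaml.SafeDumper):
        def ignore_aliases(self, data):
            return True     # never emit anchors/aliases for repeated objects

    shared = ['foo']
    doc = {'a': shared, 'b': shared, 'c': shared}
    assert '*id' not in yaml.dump(doc, Dumper=NoAliasDumper)
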
diff --git a/lib/spack/spack/test/spec_dag.py b/lib/spack/spack/test/spec_dag.py
index 4645f98565..1578bcacbe 100644
--- a/lib/spack/spack/test/spec_dag.py
+++ b/lib/spack/spack/test/spec_dag.py
@@ -28,26 +28,64 @@ You can find the dummy packages here::
spack/lib/spack/spack/test/mock_packages
"""
+import pytest
import spack
+import spack.architecture
import spack.package
from spack.spec import Spec
-from spack.test.mock_packages_test import *
-class SpecDagTest(MockPackagesTest):
+def check_links(spec_to_check):
+ for spec in spec_to_check.traverse():
+ for dependent in spec.dependents():
+ assert spec.name in dependent.dependencies_dict()
- def test_conflicting_package_constraints(self):
- self.set_pkg_dep('mpileaks', 'mpich@1.0')
- self.set_pkg_dep('callpath', 'mpich@2.0')
+ for dependency in spec.dependencies():
+ assert spec.name in dependency.dependents_dict()
+
+
+@pytest.fixture()
+def saved_deps():
+ """Returns a dictionary to save the dependencies."""
+ return {}
+
+
+@pytest.fixture()
+def set_dependency(saved_deps):
+ """Returns a function that alters the dependency information
+ for a package.
+ """
+ def _mock(pkg_name, spec, deptypes=spack.alldeps):
+ """Alters dependence information for a package.
+
+ Adds a dependency on <spec> to pkg. Use this to mock up constraints.
+ """
+ spec = Spec(spec)
+ # Save original dependencies before making any changes.
+ pkg = spack.repo.get(pkg_name)
+ if pkg_name not in saved_deps:
+ saved_deps[pkg_name] = (pkg, pkg.dependencies.copy())
+ # Change dep spec
+ # XXX(deptype): handle deptypes.
+ pkg.dependencies[spec.name] = {Spec(pkg_name): spec}
+ pkg.dependency_types[spec.name] = set(deptypes)
+ return _mock
+
+
+@pytest.mark.usefixtures('refresh_builtin_mock')
+class TestSpecDag(object):
+
+ def test_conflicting_package_constraints(self, set_dependency):
+ set_dependency('mpileaks', 'mpich@1.0')
+ set_dependency('callpath', 'mpich@2.0')
spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
- # TODO: try to do something to showt that the issue was with
+ # TODO: try to do something to show that the issue was with
# TODO: the user's input or with package inconsistencies.
- self.assertRaises(spack.spec.UnsatisfiableVersionSpecError,
- spec.normalize)
-
+ with pytest.raises(spack.spec.UnsatisfiableVersionSpecError):
+ spec.normalize()
def test_preorder_node_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -55,14 +93,13 @@ class SpecDagTest(MockPackagesTest):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'zmpi', 'fake']
- pairs = zip([0,1,2,3,4,2,3], names)
+ pairs = zip([0, 1, 2, 3, 4, 2, 3], names)
traversal = dag.traverse()
- self.assertEqual([x.name for x in traversal], names)
+ assert [x.name for x in traversal] == names
traversal = dag.traverse(depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ assert [(x, y.name) for x, y in traversal] == pairs
def test_preorder_edge_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -70,14 +107,13 @@ class SpecDagTest(MockPackagesTest):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi']
- pairs = zip([0,1,2,3,4,3,2,3,1], names)
+ pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1], names)
traversal = dag.traverse(cover='edges')
- self.assertEqual([x.name for x in traversal], names)
+ assert [x.name for x in traversal] == names
traversal = dag.traverse(cover='edges', depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ assert [(x, y.name) for x, y in traversal] == pairs
def test_preorder_path_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -85,14 +121,13 @@ class SpecDagTest(MockPackagesTest):
names = ['mpileaks', 'callpath', 'dyninst', 'libdwarf', 'libelf',
'libelf', 'zmpi', 'fake', 'zmpi', 'fake']
- pairs = zip([0,1,2,3,4,3,2,3,1,2], names)
+ pairs = zip([0, 1, 2, 3, 4, 3, 2, 3, 1, 2], names)
traversal = dag.traverse(cover='paths')
- self.assertEqual([x.name for x in traversal], names)
+ assert [x.name for x in traversal] == names
traversal = dag.traverse(cover='paths', depth=True)
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ assert [(x, y.name) for x, y in traversal] == pairs
def test_postorder_node_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -100,14 +135,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'dyninst', 'fake', 'zmpi',
'callpath', 'mpileaks']
- pairs = zip([4,3,2,3,2,1,0], names)
+ pairs = zip([4, 3, 2, 3, 2, 1, 0], names)
traversal = dag.traverse(order='post')
- self.assertEqual([x.name for x in traversal], names)
+ assert [x.name for x in traversal] == names
traversal = dag.traverse(depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ assert [(x, y.name) for x, y in traversal] == pairs
def test_postorder_edge_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -115,14 +149,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'zmpi', 'mpileaks']
- pairs = zip([4,3,3,2,3,2,1,1,0], names)
+ pairs = zip([4, 3, 3, 2, 3, 2, 1, 1, 0], names)
traversal = dag.traverse(cover='edges', order='post')
- self.assertEqual([x.name for x in traversal], names)
+ assert [x.name for x in traversal] == names
traversal = dag.traverse(cover='edges', depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ assert [(x, y.name) for x, y in traversal] == pairs
def test_postorder_path_traversal(self):
dag = Spec('mpileaks ^zmpi')
@@ -130,14 +163,13 @@ class SpecDagTest(MockPackagesTest):
names = ['libelf', 'libdwarf', 'libelf', 'dyninst', 'fake', 'zmpi',
'callpath', 'fake', 'zmpi', 'mpileaks']
- pairs = zip([4,3,3,2,3,2,1,2,1,0], names)
+ pairs = zip([4, 3, 3, 2, 3, 2, 1, 2, 1, 0], names)
traversal = dag.traverse(cover='paths', order='post')
- self.assertEqual([x.name for x in traversal], names)
+ assert [x.name for x in traversal] == names
traversal = dag.traverse(cover='paths', depth=True, order='post')
- self.assertEqual([(x, y.name) for x,y in traversal], pairs)
-
+ assert [(x, y.name) for x, y in traversal] == pairs
def test_conflicting_spec_constraints(self):
mpileaks = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf ^libdwarf')
@@ -145,11 +177,12 @@ class SpecDagTest(MockPackagesTest):
# Normalize then add conflicting constraints to the DAG (this is an
# extremely unlikely scenario, but we test for it anyway)
mpileaks.normalize()
- mpileaks.dependencies['mpich'] = Spec('mpich@1.0')
- mpileaks.dependencies['callpath'].dependencies['mpich'] = Spec('mpich@2.0')
-
- self.assertRaises(spack.spec.InconsistentSpecError, mpileaks.flatten)
+ mpileaks._dependencies['mpich'].spec = Spec('mpich@1.0')
+ mpileaks._dependencies['callpath']. \
+ spec._dependencies['mpich'].spec = Spec('mpich@2.0')
+ with pytest.raises(spack.spec.InconsistentSpecError):
+ mpileaks.flat_dependencies(copy=False)
def test_normalize_twice(self):
"""Make sure normalize can be run twice on the same spec,
@@ -159,8 +192,7 @@ class SpecDagTest(MockPackagesTest):
n1 = spec.copy()
spec.normalize()
- self.assertEqual(n1, spec)
-
+ assert n1 == spec
def test_normalize_a_lot(self):
spec = Spec('mpileaks')
@@ -169,7 +201,6 @@ class SpecDagTest(MockPackagesTest):
spec.normalize()
spec.normalize()
-
def test_normalize_with_virtual_spec(self):
dag = Spec('mpileaks',
Spec('callpath',
@@ -184,76 +215,66 @@ class SpecDagTest(MockPackagesTest):
# make sure nothing with the same name occurs twice
counts = {}
for spec in dag.traverse(key=id):
- if not spec.name in counts:
+ if spec.name not in counts:
counts[spec.name] = 0
counts[spec.name] += 1
for name in counts:
- self.assertEqual(counts[name], 1, "Count for %s was not 1!" % name)
-
-
- def check_links(self, spec_to_check):
- for spec in spec_to_check.traverse():
- for dependent in spec.dependents.values():
- self.assertTrue(
- spec.name in dependent.dependencies,
- "%s not in dependencies of %s" % (spec.name, dependent.name))
-
- for dependency in spec.dependencies.values():
- self.assertTrue(
- spec.name in dependency.dependents,
- "%s not in dependents of %s" % (spec.name, dependency.name))
-
+ assert counts[name] == 1
def test_dependents_and_dependencies_are_correct(self):
spec = Spec('mpileaks',
- Spec('callpath',
- Spec('dyninst',
- Spec('libdwarf',
- Spec('libelf')),
- Spec('libelf')),
- Spec('mpi')),
- Spec('mpi'))
-
- self.check_links(spec)
+ Spec('callpath',
+ Spec('dyninst',
+ Spec('libdwarf',
+ Spec('libelf')),
+ Spec('libelf')),
+ Spec('mpi')),
+ Spec('mpi'))
+
+ check_links(spec)
spec.normalize()
- self.check_links(spec)
-
+ check_links(spec)
- def test_unsatisfiable_version(self):
- self.set_pkg_dep('mpileaks', 'mpich@1.0')
+ def test_unsatisfiable_version(self, set_dependency):
+ set_dependency('mpileaks', 'mpich@1.0')
spec = Spec('mpileaks ^mpich@2.0 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableVersionSpecError, spec.normalize)
-
-
- def test_unsatisfiable_compiler(self):
- self.set_pkg_dep('mpileaks', 'mpich%gcc')
- spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
-
-
- def test_unsatisfiable_compiler_version(self):
- self.set_pkg_dep('mpileaks', 'mpich%gcc@4.6')
- spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableCompilerSpecError, spec.normalize)
-
-
- def test_unsatisfiable_architecture(self):
- self.set_pkg_dep('mpileaks', 'mpich=bgqos_0')
- spec = Spec('mpileaks ^mpich=sles_10_ppc64 ^callpath ^dyninst ^libelf ^libdwarf')
- self.assertRaises(spack.spec.UnsatisfiableArchitectureSpecError, spec.normalize)
-
+ with pytest.raises(spack.spec.UnsatisfiableVersionSpecError):
+ spec.normalize()
+
+ def test_unsatisfiable_compiler(self, set_dependency):
+ set_dependency('mpileaks', 'mpich%gcc')
+ spec = Spec('mpileaks ^mpich%intel ^callpath ^dyninst ^libelf'
+ ' ^libdwarf')
+ with pytest.raises(spack.spec.UnsatisfiableCompilerSpecError):
+ spec.normalize()
+
+ def test_unsatisfiable_compiler_version(self, set_dependency):
+ set_dependency('mpileaks', 'mpich%gcc@4.6')
+ spec = Spec('mpileaks ^mpich%gcc@4.5 ^callpath ^dyninst ^libelf'
+ ' ^libdwarf')
+ with pytest.raises(spack.spec.UnsatisfiableCompilerSpecError):
+ spec.normalize()
+
+ def test_unsatisfiable_architecture(self, set_dependency):
+ set_dependency('mpileaks', 'mpich platform=test target=be')
+ spec = Spec('mpileaks ^mpich platform=test target=fe ^callpath'
+ ' ^dyninst ^libelf ^libdwarf')
+ with pytest.raises(spack.spec.UnsatisfiableArchitectureSpecError):
+ spec.normalize()
def test_invalid_dep(self):
spec = Spec('libelf ^mpich')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+ with pytest.raises(spack.spec.InvalidDependencyError):
+ spec.normalize()
spec = Spec('libelf ^libdwarf')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
+ with pytest.raises(spack.spec.InvalidDependencyError):
+ spec.normalize()
spec = Spec('mpich ^dyninst ^libelf')
- self.assertRaises(spack.spec.InvalidDependencyException, spec.normalize)
-
+ with pytest.raises(spack.spec.InvalidDependencyError):
+ spec.normalize()
def test_equal(self):
# Different spec structures to test for equality
@@ -276,26 +297,26 @@ class SpecDagTest(MockPackagesTest):
# All these are equal to each other with regular ==
specs = (flat, flat_init, flip_flat, dag, flip_dag)
for lhs, rhs in zip(specs, specs):
- self.assertEqual(lhs, rhs)
- self.assertEqual(str(lhs), str(rhs))
+ assert lhs == rhs
+ assert str(lhs) == str(rhs)
# Same DAGs constructed different ways are equal
- self.assertTrue(flat.eq_dag(flat_init))
+ assert flat.eq_dag(flat_init)
# order at same level does not matter -- (dep on same parent)
- self.assertTrue(flat.eq_dag(flip_flat))
+ assert flat.eq_dag(flip_flat)
# DAGs should be unequal if nesting is different
- self.assertFalse(flat.eq_dag(dag))
- self.assertFalse(flat.eq_dag(flip_dag))
- self.assertFalse(flip_flat.eq_dag(dag))
- self.assertFalse(flip_flat.eq_dag(flip_dag))
- self.assertFalse(dag.eq_dag(flip_dag))
-
+ assert not flat.eq_dag(dag)
+ assert not flat.eq_dag(flip_dag)
+ assert not flip_flat.eq_dag(dag)
+ assert not flip_flat.eq_dag(flip_dag)
+ assert not dag.eq_dag(flip_dag)
def test_normalize_mpileaks(self):
# Spec parsed in from a string
- spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11 ^libdwarf')
+ spec = Spec('mpileaks ^mpich ^callpath ^dyninst ^libelf@1.8.11'
+ ' ^libdwarf')
# What that spec should look like after parsing
expected_flat = Spec(
@@ -331,33 +352,33 @@ class SpecDagTest(MockPackagesTest):
# All specs here should be equal under regular equality
specs = (spec, expected_flat, expected_normalized, non_unique_nodes)
for lhs, rhs in zip(specs, specs):
- self.assertEqual(lhs, rhs)
- self.assertEqual(str(lhs), str(rhs))
+ assert lhs == rhs
+ assert str(lhs) == str(rhs)
# Test that equal and equal_dag are doing the right thing
- self.assertEqual(spec, expected_flat)
- self.assertTrue(spec.eq_dag(expected_flat))
+ assert spec == expected_flat
+ assert spec.eq_dag(expected_flat)
# Normalized has different DAG structure, so NOT equal.
- self.assertNotEqual(spec, expected_normalized)
- self.assertFalse(spec.eq_dag(expected_normalized))
+ assert spec != expected_normalized
+ assert not spec.eq_dag(expected_normalized)
# Again, different DAG structure so not equal.
- self.assertNotEqual(spec, non_unique_nodes)
- self.assertFalse(spec.eq_dag(non_unique_nodes))
+ assert spec != non_unique_nodes
+ assert not spec.eq_dag(non_unique_nodes)
spec.normalize()
# After normalizing, spec_dag_equal should match the normalized spec.
- self.assertNotEqual(spec, expected_flat)
- self.assertFalse(spec.eq_dag(expected_flat))
+ assert spec != expected_flat
+ assert not spec.eq_dag(expected_flat)
- self.assertEqual(spec, expected_normalized)
- self.assertTrue(spec.eq_dag(expected_normalized))
-
- self.assertEqual(spec, non_unique_nodes)
- self.assertFalse(spec.eq_dag(non_unique_nodes))
+ # verify DAG structure without deptypes.
+ assert spec.eq_dag(expected_normalized, deptypes=False)
+ assert not spec.eq_dag(non_unique_nodes, deptypes=False)
+ assert not spec.eq_dag(expected_normalized, deptypes=True)
+ assert not spec.eq_dag(non_unique_nodes, deptypes=True)
def test_normalize_with_virtual_package(self):
spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
@@ -372,68 +393,300 @@ class SpecDagTest(MockPackagesTest):
Spec('libelf@1.8.11')),
Spec('mpi')), Spec('mpi'))
- self.assertEqual(str(spec), str(expected_normalized))
-
+ assert str(spec) == str(expected_normalized)
def test_contains(self):
spec = Spec('mpileaks ^mpi ^libelf@1.8.11 ^libdwarf')
- self.assertTrue(Spec('mpi') in spec)
- self.assertTrue(Spec('libelf') in spec)
- self.assertTrue(Spec('libelf@1.8.11') in spec)
- self.assertFalse(Spec('libelf@1.8.12') in spec)
- self.assertTrue(Spec('libdwarf') in spec)
- self.assertFalse(Spec('libgoblin') in spec)
- self.assertTrue(Spec('mpileaks') in spec)
-
+ assert Spec('mpi') in spec
+ assert Spec('libelf') in spec
+ assert Spec('libelf@1.8.11') in spec
+ assert Spec('libelf@1.8.12') not in spec
+ assert Spec('libdwarf') in spec
+ assert Spec('libgoblin') not in spec
+ assert Spec('mpileaks') in spec
def test_copy_simple(self):
orig = Spec('mpileaks')
copy = orig.copy()
+ check_links(copy)
- self.check_links(copy)
-
- self.assertEqual(orig, copy)
- self.assertTrue(orig.eq_dag(copy))
- self.assertEqual(orig._normal, copy._normal)
- self.assertEqual(orig._concrete, copy._concrete)
+ assert orig == copy
+ assert orig.eq_dag(copy)
+ assert orig._normal == copy._normal
+ assert orig._concrete == copy._concrete
# ensure no shared nodes bt/w orig and copy.
orig_ids = set(id(s) for s in orig.traverse())
copy_ids = set(id(s) for s in copy.traverse())
- self.assertFalse(orig_ids.intersection(copy_ids))
-
+ assert not orig_ids.intersection(copy_ids)
def test_copy_normalized(self):
orig = Spec('mpileaks')
orig.normalize()
copy = orig.copy()
+ check_links(copy)
- self.check_links(copy)
-
- self.assertEqual(orig, copy)
- self.assertTrue(orig.eq_dag(copy))
- self.assertEqual(orig._normal, copy._normal)
- self.assertEqual(orig._concrete, copy._concrete)
+ assert orig == copy
+ assert orig.eq_dag(copy)
+ assert orig._normal == copy._normal
+ assert orig._concrete == copy._concrete
# ensure no shared nodes bt/w orig and copy.
orig_ids = set(id(s) for s in orig.traverse())
copy_ids = set(id(s) for s in copy.traverse())
- self.assertFalse(orig_ids.intersection(copy_ids))
-
+ assert not orig_ids.intersection(copy_ids)
+ @pytest.mark.usefixtures('config')
def test_copy_concretized(self):
orig = Spec('mpileaks')
orig.concretize()
copy = orig.copy()
- self.check_links(copy)
+ check_links(copy)
- self.assertEqual(orig, copy)
- self.assertTrue(orig.eq_dag(copy))
- self.assertEqual(orig._normal, copy._normal)
- self.assertEqual(orig._concrete, copy._concrete)
+ assert orig == copy
+ assert orig.eq_dag(copy)
+ assert orig._normal == copy._normal
+ assert orig._concrete == copy._concrete
# ensure no shared nodes bt/w orig and copy.
orig_ids = set(id(s) for s in orig.traverse())
copy_ids = set(id(s) for s in copy.traverse())
- self.assertFalse(orig_ids.intersection(copy_ids))
+ assert not orig_ids.intersection(copy_ids)
+
+ """
+ Here is the graph with deptypes labeled (assume all packages have a 'dt'
+ prefix). Arrows are marked with the deptypes ('b' for 'build', 'l' for
+ 'link', 'r' for 'run').
+
+ use -bl-> top
+
+ top -b-> build1
+ top -bl-> link1
+ top -r-> run1
+
+ build1 -b-> build2
+ build1 -bl-> link2
+ build1 -r-> run2
+
+ link1 -bl-> link3
+
+ run1 -bl-> link5
+ run1 -r-> run3
+
+ link3 -b-> build2
+ link3 -bl-> link4
+
+ run3 -b-> build3
+ """
+
+ def test_deptype_traversal(self):
+ dag = Spec('dtuse')
+ dag.normalize()
+
+ names = ['dtuse', 'dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+ 'dtlink1', 'dtlink3', 'dtlink4']
+
+ traversal = dag.traverse(deptype=('build', 'link'))
+ assert [x.name for x in traversal] == names
+
+ def test_deptype_traversal_with_builddeps(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2',
+ 'dtlink1', 'dtlink3', 'dtlink4']
+
+ traversal = dag.traverse(deptype=('build', 'link'))
+ assert [x.name for x in traversal] == names
+
+ def test_deptype_traversal_full(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtbuild1', 'dtbuild2', 'dtlink2', 'dtrun2',
+ 'dtlink1', 'dtlink3', 'dtlink4', 'dtrun1', 'dtlink5',
+ 'dtrun3', 'dtbuild3']
+
+ traversal = dag.traverse(deptype=spack.alldeps)
+ assert [x.name for x in traversal] == names
+
+ def test_deptype_traversal_run(self):
+ dag = Spec('dttop')
+ dag.normalize()
+
+ names = ['dttop', 'dtrun1', 'dtrun3']
+
+ traversal = dag.traverse(deptype='run')
+ assert [x.name for x in traversal] == names
+
+ def test_hash_bits(self):
+ """Ensure getting first n bits of a base32-encoded DAG hash works."""
+
+ # RFC 4648 base32 decode table
+ b32 = dict((j, i) for i, j in enumerate('abcdefghijklmnopqrstuvwxyz'))
+ b32.update(dict((j, i) for i, j in enumerate('234567', 26)))
+
+ # some package hashes
+ tests = [
+ '35orsd4cenv743hg4i5vxha2lzayycby',
+ '6kfqtj7dap3773rxog6kkmoweix5gpwo',
+ 'e6h6ff3uvmjbq3azik2ckr6ckwm3depv',
+ 'snz2juf4ij7sv77cq3vs467q6acftmur',
+ '4eg47oedi5bbkhpoxw26v3oe6vamkfd7',
+ 'vrwabwj6umeb5vjw6flx2rnft3j457rw']
+
+ for test_hash in tests:
+ # string containing raw bits of hash ('1' and '0')
+ expected = ''.join([format(b32[c], '#07b').replace('0b', '')
+ for c in test_hash])
+
+ for bits in (1, 2, 3, 4, 7, 8, 9, 16, 64, 117, 128, 160):
+ actual_int = spack.spec.base32_prefix_bits(test_hash, bits)
+ fmt = "#0%sb" % (bits + 2)
+ actual = format(actual_int, fmt).replace('0b', '')
+
+ assert expected[:bits] == actual
+
+ with pytest.raises(ValueError):
+ spack.spec.base32_prefix_bits(test_hash, 161)
+
+ with pytest.raises(ValueError):
+ spack.spec.base32_prefix_bits(test_hash, 256)
+
+ def test_traversal_directions(self):
+ """Make sure child and parent traversals of specs work."""
+ # We'll use d for a diamond dependency
+ d = Spec('d')
+
+ # Mock spec.
+ spec = Spec('a',
+ Spec('b',
+ Spec('c', d),
+ Spec('e')),
+ Spec('f',
+ Spec('g', d)))
+
+ assert (
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g'] ==
+ [s.name for s in spec.traverse(direction='children')])
+
+ assert (
+ ['g', 'f', 'a'] ==
+ [s.name for s in spec['g'].traverse(direction='parents')])
+
+ assert (
+ ['d', 'c', 'b', 'a', 'g', 'f'] ==
+ [s.name for s in spec['d'].traverse(direction='parents')])
+
+ def test_edge_traversals(self):
+ """Make sure child and parent traversals of specs work."""
+ # We'll use d for a diamond dependency
+ d = Spec('d')
+
+ # Mock spec.
+ spec = Spec('a',
+ Spec('b',
+ Spec('c', d),
+ Spec('e')),
+ Spec('f',
+ Spec('g', d)))
+
+ assert (
+ ['a', 'b', 'c', 'd', 'e', 'f', 'g'] ==
+ [s.name for s in spec.traverse(direction='children')])
+
+ assert (
+ ['g', 'f', 'a'] ==
+ [s.name for s in spec['g'].traverse(direction='parents')])
+
+ assert (
+ ['d', 'c', 'b', 'a', 'g', 'f'] ==
+ [s.name for s in spec['d'].traverse(direction='parents')])
+
+ def test_copy_dependencies(self):
+ s1 = Spec('mpileaks ^mpich2@1.1')
+ s2 = s1.copy()
+
+ assert '^mpich2@1.1' in s2
+ assert '^mpich2' in s2
+
+ def test_construct_spec_with_deptypes(self):
+ s = Spec('a',
+ Spec('b',
+ ['build'], Spec('c')),
+ Spec('d',
+ ['build', 'link'], Spec('e',
+ ['run'], Spec('f'))))
+
+ assert s['b']._dependencies['c'].deptypes == ('build',)
+ assert s['d']._dependencies['e'].deptypes == ('build', 'link')
+ assert s['e']._dependencies['f'].deptypes == ('run',)
+
+ assert s['b']._dependencies['c'].deptypes == ('build',)
+ assert s['d']._dependencies['e'].deptypes == ('build', 'link')
+ assert s['e']._dependencies['f'].deptypes == ('run',)
+
+ assert s['c']._dependents['b'].deptypes == ('build',)
+ assert s['e']._dependents['d'].deptypes == ('build', 'link')
+ assert s['f']._dependents['e'].deptypes == ('run',)
+
+ assert s['c']._dependents['b'].deptypes == ('build',)
+ assert s['e']._dependents['d'].deptypes == ('build', 'link')
+ assert s['f']._dependents['e'].deptypes == ('run',)
+
+ def check_diamond_deptypes(self, spec):
+ """Validate deptypes in dt-diamond spec."""
+ assert spec['dt-diamond']._dependencies[
+ 'dt-diamond-left'].deptypes == ('build', 'link')
+
+ assert spec['dt-diamond']._dependencies[
+ 'dt-diamond-right'].deptypes == ('build', 'link')
+
+ assert spec['dt-diamond-left']._dependencies[
+ 'dt-diamond-bottom'].deptypes == ('build',)
+
+ assert spec['dt-diamond-right']._dependencies[
+ 'dt-diamond-bottom'].deptypes == ('build', 'link', 'run')
+
+ def check_diamond_normalized_dag(self, spec):
+ bottom = Spec('dt-diamond-bottom')
+ dag = Spec('dt-diamond',
+ ['build', 'link'], Spec('dt-diamond-left',
+ ['build'], bottom),
+ ['build', 'link'], Spec('dt-diamond-right',
+ ['build', 'link', 'run'], bottom))
+ assert spec.eq_dag(dag)
+
+ def test_normalize_diamond_deptypes(self):
+ """Ensure that dependency types are preserved even if the same thing is
+ depended on in two different ways."""
+ s = Spec('dt-diamond')
+ s.normalize()
+
+ self.check_diamond_deptypes(s)
+ self.check_diamond_normalized_dag(s)
+
+ def test_concretize_deptypes(self):
+ """Ensure that dependency types are preserved after concretization."""
+ s = Spec('dt-diamond')
+ s.concretize()
+ self.check_diamond_deptypes(s)
+
+ def test_copy_deptypes(self):
+ """Ensure that dependency types are preserved by spec copy."""
+ s1 = Spec('dt-diamond')
+ s1.normalize()
+ self.check_diamond_deptypes(s1)
+ self.check_diamond_normalized_dag(s1)
+
+ s2 = s1.copy()
+ self.check_diamond_normalized_dag(s2)
+ self.check_diamond_deptypes(s2)
+
+ s3 = Spec('dt-diamond')
+ s3.concretize()
+ self.check_diamond_deptypes(s3)
+
+ s4 = s3.copy()
+ self.check_diamond_deptypes(s4)
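
test_hash_bits above spells out the expected arithmetic: each RFC 4648 base32 character carries 5 bits, so the first n bits of a DAG hash are the first n bits of the concatenated 5-bit values. A tiny worked version of that expectation with a made-up two-character hash; prefix_bits is a local stand-in, not Spack's base32_prefix_bits:

    B32 = {c: i for i, c in enumerate('abcdefghijklmnopqrstuvwxyz234567')}

    def prefix_bits(hash_string, bits):
        stream = ''.join(format(B32[c], '05b') for c in hash_string)
        return int(stream[:bits], 2)

    # 'b' -> 1 -> 00001, 'y' -> 24 -> 11000; the first 8 bits are 00001110 == 14
    assert prefix_bits('by', 8) == 14
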
diff --git a/lib/spack/spack/test/spec_semantics.py b/lib/spack/spack/test/spec_semantics.py
index 60eb86d652..84c8650f15 100644
--- a/lib/spack/spack/test/spec_semantics.py
+++ b/lib/spack/spack/test/spec_semantics.py
@@ -22,251 +22,345 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import spack.architecture
+import pytest
from spack.spec import *
-from spack.test.mock_packages_test import *
-class SpecSematicsTest(MockPackagesTest):
- """This tests satisfies(), constrain() and other semantic operations
- on specs."""
-
- # ================================================================================
- # Utility functions to set everything up.
- # ================================================================================
- def check_satisfies(self, spec, anon_spec, concrete=False):
- left = Spec(spec, concrete=concrete)
- try:
- right = Spec(anon_spec) # if it's not anonymous, allow it.
- except:
- right = parse_anonymous_spec(anon_spec, left.name)
-
- # Satisfies is one-directional.
- self.assertTrue(left.satisfies(right))
- self.assertTrue(left.satisfies(anon_spec))
-
- # if left satisfies right, then we should be able to consrain
- # right by left. Reverse is not always true.
- right.copy().constrain(left)
+def check_satisfies(spec, anon_spec, concrete=False):
+ left = Spec(spec, concrete=concrete)
+ try:
+ right = Spec(anon_spec) # if it's not anonymous, allow it.
+ except Exception:
+ right = parse_anonymous_spec(anon_spec, left.name)
- def check_unsatisfiable(self, spec, anon_spec, concrete=False):
- left = Spec(spec, concrete=concrete)
- try:
- right = Spec(anon_spec) # if it's not anonymous, allow it.
- except:
- right = parse_anonymous_spec(anon_spec, left.name)
+ # Satisfies is one-directional.
+ assert left.satisfies(right)
+ assert left.satisfies(anon_spec)
- self.assertFalse(left.satisfies(right))
- self.assertFalse(left.satisfies(anon_spec))
+ # if left satisfies right, then we should be able to constrain
+ # right by left. Reverse is not always true.
+ right.copy().constrain(left)
- self.assertRaises(UnsatisfiableSpecError, right.copy().constrain, left)
+def check_unsatisfiable(spec, anon_spec, concrete=False):
+ left = Spec(spec, concrete=concrete)
+ try:
+ right = Spec(anon_spec) # if it's not anonymous, allow it.
+ except Exception:
+ right = parse_anonymous_spec(anon_spec, left.name)
- def check_constrain(self, expected, spec, constraint):
- exp = Spec(expected)
- spec = Spec(spec)
- constraint = Spec(constraint)
- spec.constrain(constraint)
- self.assertEqual(exp, spec)
+ assert not left.satisfies(right)
+ assert not left.satisfies(anon_spec)
+ with pytest.raises(UnsatisfiableSpecError):
+ right.copy().constrain(left)
- def check_constrain_changed(self, spec, constraint):
- spec = Spec(spec)
- self.assertTrue(spec.constrain(constraint))
+def check_constrain(expected, spec, constraint):
+ exp = Spec(expected)
+ spec = Spec(spec)
+ constraint = Spec(constraint)
+ spec.constrain(constraint)
+ assert exp == spec
- def check_constrain_not_changed(self, spec, constraint):
- spec = Spec(spec)
- self.assertFalse(spec.constrain(constraint))
+def check_constrain_changed(spec, constraint):
+ spec = Spec(spec)
+ assert spec.constrain(constraint)
- def check_invalid_constraint(self, spec, constraint):
- spec = Spec(spec)
- constraint = Spec(constraint)
- self.assertRaises(UnsatisfiableSpecError, spec.constrain, constraint)
+def check_constrain_not_changed(spec, constraint):
+ spec = Spec(spec)
+ assert not spec.constrain(constraint)
- # ================================================================================
- # Satisfiability
- # ================================================================================
- def test_satisfies(self):
- self.check_satisfies('libelf@0.8.13', '@0:1')
- self.check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1')
+def check_invalid_constraint(spec, constraint):
+ spec = Spec(spec)
+ constraint = Spec(constraint)
+ with pytest.raises(UnsatisfiableSpecError):
+ spec.constrain(constraint)
- def test_satisfies_namespace(self):
- self.check_satisfies('builtin.mpich', 'mpich')
- self.check_satisfies('builtin.mock.mpich', 'mpich')
- # TODO: only works for deps now, but shouldn't we allow this for root spec?
- # self.check_satisfies('builtin.mock.mpich', 'mpi')
+@pytest.mark.usefixtures('config', 'builtin_mock')
+class TestSpecSematics(object):
+ """This tests satisfies(), constrain() and other semantic operations
+ on specs.
+ """
+ def test_satisfies(self):
+ check_satisfies('libelf@0.8.13', '@0:1')
+ check_satisfies('libdwarf^libelf@0.8.13', '^libelf@0:1')
+
+ def test_satisfies_namespace(self):
+ check_satisfies('builtin.mpich', 'mpich')
+ check_satisfies('builtin.mock.mpich', 'mpich')
- self.check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich')
+ # TODO: only works for deps now, but shouldn't we allow for root spec?
+ # check_satisfies('builtin.mock.mpich', 'mpi')
- self.check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich')
+ check_satisfies('builtin.mock.mpich', 'builtin.mock.mpich')
+ check_unsatisfiable('builtin.mock.mpich', 'builtin.mpich')
def test_satisfies_namespaced_dep(self):
- """Ensure spec from same or unspecified namespace satisfies namespace constraint."""
- self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
-
- self.check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
- self.check_satisfies('mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich')
+ """Ensure spec from same or unspecified namespace satisfies namespace
+ constraint."""
+ check_satisfies('mpileaks ^builtin.mock.mpich', '^mpich')
- self.check_unsatisfiable('mpileaks ^builtin.mock.mpich', '^builtin.mpich')
+ check_satisfies('mpileaks ^builtin.mock.mpich', '^mpi')
+ check_satisfies(
+ 'mpileaks ^builtin.mock.mpich', '^builtin.mock.mpich')
+ check_unsatisfiable(
+ 'mpileaks ^builtin.mock.mpich', '^builtin.mpich')
def test_satisfies_compiler(self):
- self.check_satisfies('foo%gcc', '%gcc')
- self.check_satisfies('foo%intel', '%intel')
- self.check_unsatisfiable('foo%intel', '%gcc')
- self.check_unsatisfiable('foo%intel', '%pgi')
-
+ check_satisfies('foo%gcc', '%gcc')
+ check_satisfies('foo%intel', '%intel')
+ check_unsatisfiable('foo%intel', '%gcc')
+ check_unsatisfiable('foo%intel', '%pgi')
def test_satisfies_compiler_version(self):
- self.check_satisfies('foo%gcc', '%gcc@4.7.2')
- self.check_satisfies('foo%intel', '%intel@4.7.2')
+ check_satisfies('foo%gcc', '%gcc@4.7.2')
+ check_satisfies('foo%intel', '%intel@4.7.2')
- self.check_satisfies('foo%pgi@4.5', '%pgi@4.4:4.6')
- self.check_satisfies('foo@2.0%pgi@4.5', '@1:3%pgi@4.4:4.6')
+ check_satisfies('foo%pgi@4.5', '%pgi@4.4:4.6')
+ check_satisfies('foo@2.0%pgi@4.5', '@1:3%pgi@4.4:4.6')
- self.check_unsatisfiable('foo%pgi@4.3', '%pgi@4.4:4.6')
- self.check_unsatisfiable('foo@4.0%pgi', '@1:3%pgi')
- self.check_unsatisfiable('foo@4.0%pgi@4.5', '@1:3%pgi@4.4:4.6')
-
- self.check_satisfies('foo %gcc@4.7.3', '%gcc@4.7')
- self.check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3')
+ check_unsatisfiable('foo%pgi@4.3', '%pgi@4.4:4.6')
+ check_unsatisfiable('foo@4.0%pgi', '@1:3%pgi')
+ check_unsatisfiable('foo@4.0%pgi@4.5', '@1:3%pgi@4.4:4.6')
+ check_satisfies('foo %gcc@4.7.3', '%gcc@4.7')
+ check_unsatisfiable('foo %gcc@4.7', '%gcc@4.7.3')
def test_satisfies_architecture(self):
- self.check_satisfies('foo=chaos_5_x86_64_ib', '=chaos_5_x86_64_ib')
- self.check_satisfies('foo=bgqos_0', '=bgqos_0')
-
- self.check_unsatisfiable('foo=bgqos_0', '=chaos_5_x86_64_ib')
- self.check_unsatisfiable('foo=chaos_5_x86_64_ib', '=bgqos_0')
-
+ check_satisfies(
+ 'foo platform=test',
+ 'platform=test')
+ check_satisfies(
+ 'foo platform=linux',
+ 'platform=linux')
+ check_satisfies(
+ 'foo platform=test',
+ 'platform=test target=frontend')
+ check_satisfies(
+ 'foo platform=test',
+ 'platform=test os=frontend target=frontend')
+ check_satisfies(
+ 'foo platform=test os=frontend target=frontend',
+ 'platform=test')
+
+ check_unsatisfiable(
+ 'foo platform=linux',
+ 'platform=test os=redhat6 target=x86_32')
+ check_unsatisfiable(
+ 'foo os=redhat6',
+ 'platform=test os=debian6 target=x86_64')
+ check_unsatisfiable(
+ 'foo target=x86_64',
+ 'platform=test os=redhat6 target=x86_32')
+
+ check_satisfies(
+ 'foo arch=test-None-None',
+ 'platform=test')
+ check_satisfies(
+ 'foo arch=test-None-frontend',
+ 'platform=test target=frontend')
+ check_satisfies(
+ 'foo arch=test-frontend-frontend',
+ 'platform=test os=frontend target=frontend')
+ check_satisfies(
+ 'foo arch=test-frontend-frontend',
+ 'platform=test')
+ check_unsatisfiable(
+ 'foo arch=test-frontend-frontend',
+ 'platform=test os=frontend target=backend')
+
+ check_satisfies(
+ 'foo platform=test target=frontend os=frontend',
+ 'platform=test target=frontend os=frontend')
+ check_satisfies(
+ 'foo platform=test target=backend os=backend',
+ 'platform=test target=backend os=backend')
+ check_satisfies(
+ 'foo platform=test target=default_target os=default_os',
+ 'platform=test os=default_os')
+ check_unsatisfiable(
+ 'foo platform=test target=x86_32 os=redhat6',
+ 'platform=linux target=x86_32 os=redhat6')
def test_satisfies_dependencies(self):
- self.check_satisfies('mpileaks^mpich', '^mpich')
- self.check_satisfies('mpileaks^zmpi', '^zmpi')
-
- self.check_unsatisfiable('mpileaks^mpich', '^zmpi')
- self.check_unsatisfiable('mpileaks^zmpi', '^mpich')
+ check_satisfies('mpileaks^mpich', '^mpich')
+ check_satisfies('mpileaks^zmpi', '^zmpi')
+ check_unsatisfiable('mpileaks^mpich', '^zmpi')
+ check_unsatisfiable('mpileaks^zmpi', '^mpich')
def test_satisfies_dependency_versions(self):
- self.check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3')
- self.check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0')
-
- self.check_satisfies('mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
- self.check_unsatisfiable('mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
-
+ check_satisfies('mpileaks^mpich@2.0', '^mpich@1:3')
+ check_unsatisfiable('mpileaks^mpich@1.2', '^mpich@2.0')
+
+ check_satisfies(
+ 'mpileaks^mpich@2.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+ check_unsatisfiable(
+ 'mpileaks^mpich@4.0^callpath@1.5', '^mpich@1:3^callpath@1.4:1.6')
+ check_unsatisfiable(
+ 'mpileaks^mpich@2.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
+ check_unsatisfiable(
+ 'mpileaks^mpich@4.0^callpath@1.7', '^mpich@1:3^callpath@1.4:1.6')
def test_satisfies_virtual_dependencies(self):
- self.check_satisfies('mpileaks^mpi', '^mpi')
- self.check_satisfies('mpileaks^mpi', '^mpich')
-
- self.check_satisfies('mpileaks^mpi', '^zmpi')
- self.check_unsatisfiable('mpileaks^mpich', '^zmpi')
+ check_satisfies('mpileaks^mpi', '^mpi')
+ check_satisfies('mpileaks^mpi', '^mpich')
+ check_satisfies('mpileaks^mpi', '^zmpi')
+ check_unsatisfiable('mpileaks^mpich', '^zmpi')
def test_satisfies_virtual_dependency_versions(self):
- self.check_satisfies('mpileaks^mpi@1.5', '^mpi@1.2:1.6')
- self.check_unsatisfiable('mpileaks^mpi@3', '^mpi@1.2:1.6')
+ check_satisfies('mpileaks^mpi@1.5', '^mpi@1.2:1.6')
+ check_unsatisfiable('mpileaks^mpi@3', '^mpi@1.2:1.6')
- self.check_satisfies('mpileaks^mpi@2:', '^mpich')
- self.check_satisfies('mpileaks^mpi@2:', '^mpich@3.0.4')
- self.check_satisfies('mpileaks^mpi@2:', '^mpich2@1.4')
+ check_satisfies('mpileaks^mpi@2:', '^mpich')
+ check_satisfies('mpileaks^mpi@2:', '^mpich@3.0.4')
+ check_satisfies('mpileaks^mpi@2:', '^mpich2@1.4')
- self.check_satisfies('mpileaks^mpi@1:', '^mpich2')
- self.check_satisfies('mpileaks^mpi@2:', '^mpich2')
-
- self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich2@1.4')
- self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich2')
- self.check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0')
+ check_satisfies('mpileaks^mpi@1:', '^mpich2')
+ check_satisfies('mpileaks^mpi@2:', '^mpich2')
+ check_unsatisfiable('mpileaks^mpi@3:', '^mpich2@1.4')
+ check_unsatisfiable('mpileaks^mpi@3:', '^mpich2')
+ check_unsatisfiable('mpileaks^mpi@3:', '^mpich@1.0')
def test_satisfies_matching_variant(self):
- self.check_satisfies('mpich+foo', 'mpich+foo')
- self.check_satisfies('mpich~foo', 'mpich~foo')
+ check_satisfies('mpich+foo', 'mpich+foo')
+ check_satisfies('mpich~foo', 'mpich~foo')
+ check_satisfies('mpich foo=1', 'mpich foo=1')
+ # confirm that synonymous syntax works correctly
+ check_satisfies('mpich+foo', 'mpich foo=True')
+ check_satisfies('mpich foo=true', 'mpich+foo')
+ check_satisfies('mpich~foo', 'mpich foo=FALSE')
+ check_satisfies('mpich foo=False', 'mpich~foo')
def test_satisfies_unconstrained_variant(self):
# only asked for mpich, no constraints. Either will do.
- self.check_satisfies('mpich+foo', 'mpich')
- self.check_satisfies('mpich~foo', 'mpich')
-
+ check_satisfies('mpich+foo', 'mpich')
+ check_satisfies('mpich~foo', 'mpich')
+ check_satisfies('mpich foo=1', 'mpich')
def test_unsatisfiable_variants(self):
# This case is different depending on whether the specs are concrete.
# 'mpich' is not concrete:
- self.check_satisfies('mpich', 'mpich+foo', False)
- self.check_satisfies('mpich', 'mpich~foo', False)
+ check_satisfies('mpich', 'mpich+foo', False)
+ check_satisfies('mpich', 'mpich~foo', False)
+ check_satisfies('mpich', 'mpich foo=1', False)
# 'mpich' is concrete:
- self.check_unsatisfiable('mpich', 'mpich+foo', True)
- self.check_unsatisfiable('mpich', 'mpich~foo', True)
-
+ check_unsatisfiable('mpich', 'mpich+foo', True)
+ check_unsatisfiable('mpich', 'mpich~foo', True)
+ check_unsatisfiable('mpich', 'mpich foo=1', True)
def test_unsatisfiable_variant_mismatch(self):
# No match in specs
- self.check_unsatisfiable('mpich~foo', 'mpich+foo')
- self.check_unsatisfiable('mpich+foo', 'mpich~foo')
+ check_unsatisfiable('mpich~foo', 'mpich+foo')
+ check_unsatisfiable('mpich+foo', 'mpich~foo')
+ check_unsatisfiable('mpich foo=1', 'mpich foo=2')
+ def test_satisfies_matching_compiler_flag(self):
+ check_satisfies('mpich cppflags="-O3"', 'mpich cppflags="-O3"')
+ check_satisfies(
+ 'mpich cppflags="-O3 -Wall"', 'mpich cppflags="-O3 -Wall"'
+ )
+
+ def test_satisfies_unconstrained_compiler_flag(self):
+ # only asked for mpich, no constraints. Any will do.
+ check_satisfies('mpich cppflags="-O3"', 'mpich')
+
+ def test_unsatisfiable_compiler_flag(self):
+ # This case is different depending on whether the specs are concrete.
+
+ # 'mpich' is not concrete:
+ check_satisfies('mpich', 'mpich cppflags="-O3"', False)
+
+ # 'mpich' is concrete:
+ check_unsatisfiable('mpich', 'mpich cppflags="-O3"', True)
+
+ def test_unsatisfiable_compiler_flag_mismatch(self):
+ # No match in specs
+ check_unsatisfiable(
+ 'mpich cppflags="-O3"', 'mpich cppflags="-O2"')
def test_satisfies_virtual(self):
# Don't use check_satisfies: it checks constrain() too, and
# you can't constrain a non-virtual by a virtual.
- self.assertTrue(Spec('mpich').satisfies(Spec('mpi')))
- self.assertTrue(Spec('mpich2').satisfies(Spec('mpi')))
- self.assertTrue(Spec('zmpi').satisfies(Spec('mpi')))
-
+ assert Spec('mpich').satisfies(Spec('mpi'))
+ assert Spec('mpich2').satisfies(Spec('mpi'))
+ assert Spec('zmpi').satisfies(Spec('mpi'))
def test_satisfies_virtual_dep_with_virtual_constraint(self):
"""Ensure we can satisfy virtual constraints when there are multiple
vdep providers in the specs."""
- self.assertTrue(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^openblas'))
- self.assertFalse(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^openblas'))
-
- self.assertFalse(Spec('netlib-lapack ^openblas').satisfies('netlib-lapack ^netlib-blas'))
- self.assertTrue(Spec('netlib-lapack ^netlib-blas').satisfies('netlib-lapack ^netlib-blas'))
-
-
- # ================================================================================
+ assert Spec('netlib-lapack ^openblas').satisfies(
+ 'netlib-lapack ^openblas'
+ )
+ assert not Spec('netlib-lapack ^netlib-blas').satisfies(
+ 'netlib-lapack ^openblas'
+ )
+ assert not Spec('netlib-lapack ^openblas').satisfies(
+ 'netlib-lapack ^netlib-blas'
+ )
+ assert Spec('netlib-lapack ^netlib-blas').satisfies(
+ 'netlib-lapack ^netlib-blas'
+ )
+
+ def test_satisfies_same_spec_with_different_hash(self):
+ """Ensure that concrete specs are matched *exactly* by hash."""
+ s1 = Spec('mpileaks').concretized()
+ s2 = s1.copy()
+
+ assert s1.satisfies(s2)
+ assert s2.satisfies(s1)
+
+ # Simulate specs that were installed before and after a change to
+ # Spack's hashing algorithm. This just reverses s2's hash.
+ s2._hash = s1.dag_hash()[-1::-1]
+
+ assert not s1.satisfies(s2)
+ assert not s2.satisfies(s1)
+
+ # ========================================================================
# Indexing specs
- # ================================================================================
+ # ========================================================================
def test_self_index(self):
s = Spec('callpath')
- self.assertTrue(s['callpath'] == s)
-
+ assert s['callpath'] == s
def test_dep_index(self):
s = Spec('callpath')
s.normalize()
- self.assertTrue(s['callpath'] == s)
- self.assertTrue(type(s['dyninst']) == Spec)
- self.assertTrue(type(s['libdwarf']) == Spec)
- self.assertTrue(type(s['libelf']) == Spec)
- self.assertTrue(type(s['mpi']) == Spec)
-
- self.assertTrue(s['dyninst'].name == 'dyninst')
- self.assertTrue(s['libdwarf'].name == 'libdwarf')
- self.assertTrue(s['libelf'].name == 'libelf')
- self.assertTrue(s['mpi'].name == 'mpi')
+ assert s['callpath'] == s
+ assert type(s['dyninst']) == Spec
+ assert type(s['libdwarf']) == Spec
+ assert type(s['libelf']) == Spec
+ assert type(s['mpi']) == Spec
+ assert s['dyninst'].name == 'dyninst'
+ assert s['libdwarf'].name == 'libdwarf'
+ assert s['libelf'].name == 'libelf'
+ assert s['mpi'].name == 'mpi'
def test_spec_contains_deps(self):
s = Spec('callpath')
s.normalize()
- self.assertTrue('dyninst' in s)
- self.assertTrue('libdwarf' in s)
- self.assertTrue('libelf' in s)
- self.assertTrue('mpi' in s)
-
+ assert 'dyninst' in s
+ assert 'libdwarf' in s
+ assert 'libelf' in s
+ assert 'mpi' in s
+ @pytest.mark.usefixtures('config')
def test_virtual_index(self):
s = Spec('callpath')
s.concretize()
@@ -280,89 +374,149 @@ class SpecSematicsTest(MockPackagesTest):
s_zmpi = Spec('callpath ^zmpi')
s_zmpi.concretize()
-
- self.assertTrue(s['mpi'].name != 'mpi')
- self.assertTrue(s_mpich['mpi'].name == 'mpich')
- self.assertTrue(s_mpich2['mpi'].name == 'mpich2')
- self.assertTrue(s_zmpi['zmpi'].name == 'zmpi')
+ assert s['mpi'].name != 'mpi'
+ assert s_mpich['mpi'].name == 'mpich'
+ assert s_mpich2['mpi'].name == 'mpich2'
+ assert s_zmpi['zmpi'].name == 'zmpi'
for spec in [s, s_mpich, s_mpich2, s_zmpi]:
- self.assertTrue('mpi' in spec)
-
+ assert 'mpi' in spec
- # ================================================================================
+ # ========================================================================
# Constraints
- # ================================================================================
+ # ========================================================================
def test_constrain_variants(self):
- self.check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3')
- self.check_constrain('libelf@2.1:2.5%gcc@4.5:4.6',
- 'libelf@0:2.5%gcc@2:4.6', 'libelf@2.1:3%gcc@4.5:4.7')
-
- self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
- self.check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo')
-
- self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
- self.check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo')
-
-
- def test_constrain_arch(self):
- self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
- self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
-
+ check_constrain('libelf@2.1:2.5', 'libelf@0:2.5', 'libelf@2.1:3')
+ check_constrain(
+ 'libelf@2.1:2.5%gcc@4.5:4.6',
+ 'libelf@0:2.5%gcc@2:4.6',
+ 'libelf@2.1:3%gcc@4.5:4.7'
+ )
+ check_constrain('libelf+debug+foo', 'libelf+debug', 'libelf+foo')
+ check_constrain(
+ 'libelf+debug+foo', 'libelf+debug', 'libelf+debug+foo'
+ )
+ check_constrain(
+ 'libelf debug=2 foo=1', 'libelf debug=2', 'libelf foo=1'
+ )
+ check_constrain(
+ 'libelf debug=2 foo=1', 'libelf debug=2', 'libelf debug=2 foo=1'
+ )
+
+ check_constrain('libelf+debug~foo', 'libelf+debug', 'libelf~foo')
+ check_constrain(
+ 'libelf+debug~foo', 'libelf+debug', 'libelf+debug~foo'
+ )
+
+ def test_constrain_compiler_flags(self):
+ check_constrain(
+ 'libelf cflags="-O3" cppflags="-Wall"',
+ 'libelf cflags="-O3"',
+ 'libelf cppflags="-Wall"'
+ )
+ check_constrain(
+ 'libelf cflags="-O3" cppflags="-Wall"',
+ 'libelf cflags="-O3"',
+ 'libelf cflags="-O3" cppflags="-Wall"'
+ )
+
+ def test_constrain_architecture(self):
+ check_constrain(
+ 'libelf target=default_target os=default_os',
+ 'libelf target=default_target os=default_os',
+ 'libelf target=default_target os=default_os'
+ )
+ check_constrain(
+ 'libelf target=default_target os=default_os',
+ 'libelf',
+ 'libelf target=default_target os=default_os'
+ )
def test_constrain_compiler(self):
- self.check_constrain('libelf=bgqos_0', 'libelf=bgqos_0', 'libelf=bgqos_0')
- self.check_constrain('libelf=bgqos_0', 'libelf', 'libelf=bgqos_0')
-
+ check_constrain(
+ 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7', 'libelf %gcc@4.4.7'
+ )
+ check_constrain(
+ 'libelf %gcc@4.4.7', 'libelf', 'libelf %gcc@4.4.7'
+ )
def test_invalid_constraint(self):
- self.check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3')
- self.check_invalid_constraint('libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7')
-
- self.check_invalid_constraint('libelf+debug', 'libelf~debug')
- self.check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
+ check_invalid_constraint('libelf@0:2.0', 'libelf@2.1:3')
+ check_invalid_constraint(
+ 'libelf@0:2.5%gcc@4.8:4.9', 'libelf@2.1:3%gcc@4.5:4.7')
- self.check_invalid_constraint('libelf=bgqos_0', 'libelf=x86_54')
+ check_invalid_constraint('libelf+debug', 'libelf~debug')
+ check_invalid_constraint('libelf+debug~foo', 'libelf+debug+foo')
+ check_invalid_constraint('libelf debug=2', 'libelf debug=1')
+ check_invalid_constraint(
+ 'libelf cppflags="-O3"', 'libelf cppflags="-O2"')
+ check_invalid_constraint(
+ 'libelf platform=test target=be os=be', 'libelf target=fe os=fe'
+ )
def test_constrain_changed(self):
- self.check_constrain_changed('libelf', '@1.0')
- self.check_constrain_changed('libelf', '@1.0:5.0')
- self.check_constrain_changed('libelf', '%gcc')
- self.check_constrain_changed('libelf%gcc', '%gcc@4.5')
- self.check_constrain_changed('libelf', '+debug')
- self.check_constrain_changed('libelf', '~debug')
- self.check_constrain_changed('libelf', '=bgqos_0')
-
+ check_constrain_changed('libelf', '@1.0')
+ check_constrain_changed('libelf', '@1.0:5.0')
+ check_constrain_changed('libelf', '%gcc')
+ check_constrain_changed('libelf%gcc', '%gcc@4.5')
+ check_constrain_changed('libelf', '+debug')
+ check_constrain_changed('libelf', '~debug')
+ check_constrain_changed('libelf', 'debug=2')
+ check_constrain_changed('libelf', 'cppflags="-O3"')
+
+ platform = spack.architecture.platform()
+ check_constrain_changed(
+ 'libelf', 'target=' + platform.target('default_target').name)
+ check_constrain_changed(
+ 'libelf', 'os=' + platform.operating_system('default_os').name)
def test_constrain_not_changed(self):
- self.check_constrain_not_changed('libelf', 'libelf')
- self.check_constrain_not_changed('libelf@1.0', '@1.0')
- self.check_constrain_not_changed('libelf@1.0:5.0', '@1.0:5.0')
- self.check_constrain_not_changed('libelf%gcc', '%gcc')
- self.check_constrain_not_changed('libelf%gcc@4.5', '%gcc@4.5')
- self.check_constrain_not_changed('libelf+debug', '+debug')
- self.check_constrain_not_changed('libelf~debug', '~debug')
- self.check_constrain_not_changed('libelf=bgqos_0', '=bgqos_0')
- self.check_constrain_not_changed('libelf^foo', 'libelf^foo')
- self.check_constrain_not_changed('libelf^foo^bar', 'libelf^foo^bar')
-
+ check_constrain_not_changed('libelf', 'libelf')
+ check_constrain_not_changed('libelf@1.0', '@1.0')
+ check_constrain_not_changed('libelf@1.0:5.0', '@1.0:5.0')
+ check_constrain_not_changed('libelf%gcc', '%gcc')
+ check_constrain_not_changed('libelf%gcc@4.5', '%gcc@4.5')
+ check_constrain_not_changed('libelf+debug', '+debug')
+ check_constrain_not_changed('libelf~debug', '~debug')
+ check_constrain_not_changed('libelf debug=2', 'debug=2')
+ check_constrain_not_changed(
+ 'libelf cppflags="-O3"', 'cppflags="-O3"')
+
+ platform = spack.architecture.platform()
+ default_target = platform.target('default_target').name
+ check_constrain_not_changed(
+ 'libelf target=' + default_target, 'target=' + default_target)
def test_constrain_dependency_changed(self):
- self.check_constrain_changed('libelf^foo', 'libelf^foo@1.0')
- self.check_constrain_changed('libelf^foo', 'libelf^foo@1.0:5.0')
- self.check_constrain_changed('libelf^foo', 'libelf^foo%gcc')
- self.check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
- self.check_constrain_changed('libelf^foo', 'libelf^foo+debug')
- self.check_constrain_changed('libelf^foo', 'libelf^foo~debug')
- self.check_constrain_changed('libelf^foo', 'libelf^foo=bgqos_0')
-
+ check_constrain_changed('libelf^foo', 'libelf^foo@1.0')
+ check_constrain_changed('libelf^foo', 'libelf^foo@1.0:5.0')
+ check_constrain_changed('libelf^foo', 'libelf^foo%gcc')
+ check_constrain_changed('libelf^foo%gcc', 'libelf^foo%gcc@4.5')
+ check_constrain_changed('libelf^foo', 'libelf^foo+debug')
+ check_constrain_changed('libelf^foo', 'libelf^foo~debug')
+
+ platform = spack.architecture.platform()
+ default_target = platform.target('default_target').name
+ check_constrain_changed(
+ 'libelf^foo', 'libelf^foo target=' + default_target)
def test_constrain_dependency_not_changed(self):
- self.check_constrain_not_changed('libelf^foo@1.0', 'libelf^foo@1.0')
- self.check_constrain_not_changed('libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0')
- self.check_constrain_not_changed('libelf^foo%gcc', 'libelf^foo%gcc')
- self.check_constrain_not_changed('libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
- self.check_constrain_not_changed('libelf^foo+debug', 'libelf^foo+debug')
- self.check_constrain_not_changed('libelf^foo~debug', 'libelf^foo~debug')
- self.check_constrain_not_changed('libelf^foo=bgqos_0', 'libelf^foo=bgqos_0')
+ check_constrain_not_changed('libelf^foo@1.0', 'libelf^foo@1.0')
+ check_constrain_not_changed(
+ 'libelf^foo@1.0:5.0', 'libelf^foo@1.0:5.0')
+ check_constrain_not_changed('libelf^foo%gcc', 'libelf^foo%gcc')
+ check_constrain_not_changed(
+ 'libelf^foo%gcc@4.5', 'libelf^foo%gcc@4.5')
+ check_constrain_not_changed(
+ 'libelf^foo+debug', 'libelf^foo+debug')
+ check_constrain_not_changed(
+ 'libelf^foo~debug', 'libelf^foo~debug')
+ check_constrain_not_changed(
+ 'libelf^foo cppflags="-O3"', 'libelf^foo cppflags="-O3"')
+
+ platform = spack.architecture.platform()
+ default_target = platform.target('default_target').name
+ check_constrain_not_changed(
+ 'libelf^foo target=' + default_target,
+ 'libelf^foo target=' + default_target)
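
The whole hunk above follows one conversion pattern: TestCase methods become module-level helpers, self.assert* calls become bare asserts, and self.assertRaises becomes pytest.raises. A minimal generic sketch of that pattern (illustrative only, not Spack code):

import pytest


def check_positive(value):
    # a bare assert replaces self.assertTrue(...); pytest still reports
    # the failing expression and its operands on failure
    assert value > 0


class TestNumbers(object):
    def test_positive(self):
        check_positive(3)

    def test_raises(self):
        # pytest.raises replaces self.assertRaises(...)
        with pytest.raises(ZeroDivisionError):
            1 / 0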
diff --git a/lib/spack/spack/test/spec_syntax.py b/lib/spack/spack/test/spec_syntax.py
index 928d111ea9..043d9b176f 100644
--- a/lib/spack/spack/test/spec_syntax.py
+++ b/lib/spack/spack/test/spec_syntax.py
@@ -22,71 +22,111 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import unittest
+import pytest
+import shlex
-import spack.spec
+import spack.spec as sp
from spack.parse import Token
from spack.spec import *
# Sample output for a complex lexing.
-complex_lex = [Token(ID, 'mvapich_foo'),
- Token(DEP),
- Token(ID, '_openmpi'),
- Token(AT),
- Token(ID, '1.2'),
- Token(COLON),
- Token(ID, '1.4'),
- Token(COMMA),
- Token(ID, '1.6'),
- Token(PCT),
- Token(ID, 'intel'),
- Token(AT),
- Token(ID, '12.1'),
- Token(COLON),
- Token(ID, '12.6'),
- Token(ON),
- Token(ID, 'debug'),
- Token(OFF),
- Token(ID, 'qt_4'),
- Token(DEP),
- Token(ID, 'stackwalker'),
- Token(AT),
- Token(ID, '8.1_1e')]
-
-
-class SpecSyntaxTest(unittest.TestCase):
- # ================================================================================
+complex_lex = [Token(sp.ID, 'mvapich_foo'),
+ Token(sp.DEP),
+ Token(sp.ID, '_openmpi'),
+ Token(sp.AT),
+ Token(sp.ID, '1.2'),
+ Token(sp.COLON),
+ Token(sp.ID, '1.4'),
+ Token(sp.COMMA),
+ Token(sp.ID, '1.6'),
+ Token(sp.PCT),
+ Token(sp.ID, 'intel'),
+ Token(sp.AT),
+ Token(sp.ID, '12.1'),
+ Token(sp.COLON),
+ Token(sp.ID, '12.6'),
+ Token(sp.ON),
+ Token(sp.ID, 'debug'),
+ Token(sp.OFF),
+ Token(sp.ID, 'qt_4'),
+ Token(sp.DEP),
+ Token(sp.ID, 'stackwalker'),
+ Token(sp.AT),
+ Token(sp.ID, '8.1_1e')]
+
+# Another sample lexer output with a kv pair.
+kv_lex = [Token(sp.ID, 'mvapich_foo'),
+ Token(sp.ID, 'debug'),
+ Token(sp.EQ),
+ Token(sp.VAL, '4'),
+ Token(sp.DEP),
+ Token(sp.ID, '_openmpi'),
+ Token(sp.AT),
+ Token(sp.ID, '1.2'),
+ Token(sp.COLON),
+ Token(sp.ID, '1.4'),
+ Token(sp.COMMA),
+ Token(sp.ID, '1.6'),
+ Token(sp.PCT),
+ Token(sp.ID, 'intel'),
+ Token(sp.AT),
+ Token(sp.ID, '12.1'),
+ Token(sp.COLON),
+ Token(sp.ID, '12.6'),
+ Token(sp.ON),
+ Token(sp.ID, 'debug'),
+ Token(sp.OFF),
+ Token(sp.ID, 'qt_4'),
+ Token(sp.DEP),
+ Token(sp.ID, 'stackwalker'),
+ Token(sp.AT),
+ Token(sp.ID, '8.1_1e')]
+
+
+class TestSpecSyntax(object):
+ # ========================================================================
# Parse checks
- # ================================================================================
- def check_parse(self, expected, spec=None):
+ # ========================================================================
+
+ def check_parse(self, expected, spec=None, remove_arch=True):
"""Assert that the provided spec is able to be parsed.
- If this is called with one argument, it assumes that the string is
- canonical (i.e., no spaces and ~ instead of - for variants) and that it
- will convert back to the string it came from.
- If this is called with two arguments, the first argument is the expected
- canonical form and the second is a non-canonical input to be parsed.
+ If this is called with one argument, it assumes that the
+ string is canonical (i.e., no spaces and ~ instead of - for
+ variants) and that it will convert back to the string it came
+ from.
+
+ If this is called with two arguments, the first argument is
+ the expected canonical form and the second is a non-canonical
+ input to be parsed.
+
"""
if spec is None:
spec = expected
- output = spack.spec.parse(spec)
- parsed = (" ".join(str(spec) for spec in output))
- self.assertEqual(expected, parsed)
+ output = sp.parse(spec)
+ parsed = (" ".join(str(spec) for spec in output))
+ assert expected == parsed
def check_lex(self, tokens, spec):
- """Check that the provided spec parses to the provided list of tokens."""
- lex_output = SpecLexer().lex(spec)
+ """Check that the provided spec parses to the provided token list."""
+ spec = shlex.split(spec)
+ lex_output = sp.SpecLexer().lex(spec)
for tok, spec_tok in zip(tokens, lex_output):
- if tok.type == ID:
- self.assertEqual(tok, spec_tok)
+ if tok.type == sp.ID or tok.type == sp.VAL:
+ assert tok == spec_tok
else:
# Only check the type for non-identifiers.
- self.assertEqual(tok.type, spec_tok.type)
+ assert tok.type == spec_tok.type
- # ================================================================================
+ def _check_raises(self, exc_type, items):
+ for item in items:
+ with pytest.raises(exc_type):
+ self.check_parse(item)
+
+ # ========================================================================
# Parse checks
- # ===============================================================================
+ # ========================================================================
def test_package_names(self):
self.check_parse("mvapich")
self.check_parse("mvapich_foo")
@@ -102,80 +142,223 @@ class SpecSyntaxTest(unittest.TestCase):
self.check_parse("openmpi^hwloc@:1.4b7-rc3")
self.check_parse("openmpi^hwloc@1.2e6:1.4b7-rc3")
+ def test_multiple_specs(self):
+ self.check_parse("mvapich emacs")
+
+ def test_multiple_specs_after_kv(self):
+ self.check_parse('mvapich cppflags="-O3 -fPIC" emacs')
+ self.check_parse('mvapich cflags="-O3" emacs',
+ 'mvapich cflags=-O3 emacs')
+
+ def test_multiple_specs_long_second(self):
+ self.check_parse('mvapich emacs@1.1.1%intel cflags="-O3"',
+ 'mvapich emacs @1.1.1 %intel cflags=-O3')
+ self.check_parse('mvapich cflags="-O3 -fPIC" emacs^ncurses%intel')
+
def test_full_specs(self):
- self.check_parse("mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4^stackwalker@8.1_1e")
+ self.check_parse(
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1+debug~qt_4"
+ "^stackwalker@8.1_1e")
+ self.check_parse(
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2 ~qt_4"
+ "^stackwalker@8.1_1e")
+ self.check_parse(
+ 'mvapich_foo'
+ '^_openmpi@1.2:1.4,1.6%intel@12.1 cppflags="-O3" +debug~qt_4'
+ '^stackwalker@8.1_1e')
+ self.check_parse(
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1 debug=2 ~qt_4"
+ "^stackwalker@8.1_1e arch=test-redhat6-x86_32")
def test_canonicalize(self):
self.check_parse(
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e",
- "mvapich_foo ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 ^stackwalker@8.1_1e")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
+ "^stackwalker@8.1_1e",
+
+ "mvapich_foo "
+ "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6+debug~qt_4 "
+ "^stackwalker@8.1_1e")
self.check_parse(
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e",
- "mvapich_foo ^stackwalker@8.1_1e ^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
+ "^stackwalker@8.1_1e",
+
+ "mvapich_foo "
+ "^stackwalker@8.1_1e "
+ "^_openmpi@1.6,1.2:1.4%intel@12.1:12.6~qt_4+debug")
self.check_parse(
"x^y@1,2:3,4%intel@1,2,3,4+a~b+c~d+e~f",
"x ^y~f+e~d+c~b+a@4,2:3,1%intel@4,3,2,1")
+ self.check_parse(
+ "x arch=test-redhat6-None "
+ "^y arch=test-None-x86_64 "
+ "^z arch=linux-None-None",
+
+ "x os=fe "
+ "^y target=be "
+ "^z platform=linux")
+
+ self.check_parse(
+ "x arch=test-debian6-x86_64 "
+ "^y arch=test-debian6-x86_64",
+
+ "x os=default_os target=default_target "
+ "^y os=default_os target=default_target")
+
self.check_parse("x^y", "x@: ^y@:")
def test_parse_errors(self):
- self.assertRaises(SpecParseError, self.check_parse, "x@@1.2")
- self.assertRaises(SpecParseError, self.check_parse, "x ^y@@1.2")
- self.assertRaises(SpecParseError, self.check_parse, "x@1.2::")
- self.assertRaises(SpecParseError, self.check_parse, "x::")
+ errors = ['x@@1.2', 'x ^y@@1.2', 'x@1.2::', 'x::']
+ self._check_raises(SpecParseError, errors)
def test_duplicate_variant(self):
- self.assertRaises(DuplicateVariantError, self.check_parse, "x@1.2+debug+debug")
- self.assertRaises(DuplicateVariantError, self.check_parse, "x ^y@1.2+debug+debug")
+ duplicates = [
+ 'x@1.2+debug+debug',
+ 'x ^y@1.2+debug debug=true',
+ 'x ^y@1.2 debug=false debug=true',
+ 'x ^y@1.2 debug=false ~debug'
+ ]
+ self._check_raises(DuplicateVariantError, duplicates)
- def test_duplicate_depdendence(self):
- self.assertRaises(DuplicateDependencyError, self.check_parse, "x ^y ^y")
+ def test_duplicate_dependency(self):
+ self._check_raises(DuplicateDependencyError, ["x ^y ^y"])
def test_duplicate_compiler(self):
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%intel")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%intel%gcc")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x%gcc%intel")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%intel")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%intel%gcc")
- self.assertRaises(DuplicateCompilerSpecError, self.check_parse, "x ^y%gcc%intel")
+ duplicates = [
+ "x%intel%intel",
+ "x%intel%gcc",
+ "x%gcc%intel",
+ "x ^y%intel%intel",
+ "x ^y%intel%gcc",
+ "x ^y%gcc%intel"
+ ]
+ self._check_raises(DuplicateCompilerSpecError, duplicates)
+ def test_duplicate_architecture(self):
+ duplicates = [
+ "x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64",
+ "x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le",
+ "x arch=linux-rhel7-ppc64le arch=linux-rhel7-x86_64",
+ "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-x86_64",
+ "y ^x arch=linux-rhel7-x86_64 arch=linux-rhel7-ppc64le"
+ ]
+ self._check_raises(DuplicateArchitectureError, duplicates)
- # ================================================================================
+ def test_duplicate_architecture_component(self):
+ duplicates = [
+ "x os=fe os=fe",
+ "x os=fe os=be",
+ "x target=fe target=fe",
+ "x target=fe target=be",
+ "x platform=test platform=test",
+ "x os=fe platform=test target=fe os=fe",
+ "x target=be platform=test os=be os=fe"
+ ]
+ self._check_raises(DuplicateArchitectureError, duplicates)
+
+ # ========================================================================
# Lex checks
- # ================================================================================
+ # ========================================================================
def test_ambiguous(self):
# This first one is ambiguous because - can be in an identifier AND
# indicate disabling an option.
- self.assertRaises(
- AssertionError, self.check_lex, complex_lex,
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4^stackwalker@8.1_1e")
+ with pytest.raises(AssertionError):
+ self.check_lex(
+ complex_lex,
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug-qt_4"
+ "^stackwalker@8.1_1e"
+ )
- # The following lexes are non-ambiguous (add a space before -qt_4) and should all
- # result in the tokens in complex_lex
+ # The following lexes are non-ambiguous (add a space before -qt_4)
+ # and should all result in the tokens in complex_lex
def test_minimal_spaces(self):
self.check_lex(
complex_lex,
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4^stackwalker@8.1_1e")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4"
+ "^stackwalker@8.1_1e")
self.check_lex(
complex_lex,
- "mvapich_foo^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4^stackwalker@8.1_1e")
+ "mvapich_foo"
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4"
+ "^stackwalker@8.1_1e")
def test_spaces_between_dependences(self):
self.check_lex(
complex_lex,
- "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 ^stackwalker @ 8.1_1e")
+ "mvapich_foo "
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug -qt_4 "
+ "^stackwalker @ 8.1_1e")
self.check_lex(
complex_lex,
- "mvapich_foo ^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 ^stackwalker @ 8.1_1e")
+ "mvapich_foo "
+ "^_openmpi@1.2:1.4,1.6%intel@12.1:12.6+debug~qt_4 "
+ "^stackwalker @ 8.1_1e")
def test_spaces_between_options(self):
self.check_lex(
complex_lex,
- "mvapich_foo ^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 ^stackwalker @8.1_1e")
+ "mvapich_foo "
+ "^_openmpi @1.2:1.4,1.6 %intel @12.1:12.6 +debug -qt_4 "
+ "^stackwalker @8.1_1e")
def test_way_too_many_spaces(self):
self.check_lex(
complex_lex,
- "mvapich_foo ^ _openmpi @ 1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 ^ stackwalker @ 8.1_1e")
+ "mvapich_foo "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
+ self.check_lex(
+ complex_lex,
+ "mvapich_foo "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug ~ qt_4 "
+ "^ stackwalker @ 8.1_1e")
+
+ def test_kv_with_quotes(self):
+ self.check_lex(
+ kv_lex,
+ "mvapich_foo debug='4' "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
+ self.check_lex(
+ kv_lex,
+ 'mvapich_foo debug="4" '
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
+ self.check_lex(
+ kv_lex,
+ "mvapich_foo 'debug = 4' "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
+
+ def test_kv_without_quotes(self):
+ self.check_lex(
+ kv_lex,
+ "mvapich_foo debug=4 "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
+
+ def test_kv_with_spaces(self):
+ self.check_lex(
+ kv_lex,
+ "mvapich_foo debug = 4 "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
+ self.check_lex(
+ kv_lex,
+ "mvapich_foo debug =4 "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
+ self.check_lex(
+ kv_lex,
+ "mvapich_foo debug= 4 "
+ "^ _openmpi @1.2 : 1.4 , 1.6 % intel @ 12.1 : 12.6 + debug - qt_4 "
+ "^ stackwalker @ 8.1_1e")
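
The quoted key/value cases above all lex to the same kv_lex tokens because check_lex now runs the input through shlex.split before handing it to the lexer. A standard-library illustration (independent of Spack) of why the different quote styles collapse to the same words:

import shlex

print(shlex.split("mvapich_foo debug='4'"))    # ['mvapich_foo', 'debug=4']
print(shlex.split('mvapich_foo debug="4"'))    # ['mvapich_foo', 'debug=4']
print(shlex.split("mvapich_foo 'debug = 4'"))  # ['mvapich_foo', 'debug = 4']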
diff --git a/lib/spack/spack/test/spec_yaml.py b/lib/spack/spack/test/spec_yaml.py
index 0230fc203a..e913dc8412 100644
--- a/lib/spack/spack/test/spec_yaml.py
+++ b/lib/spack/spack/test/spec_yaml.py
@@ -27,45 +27,162 @@
YAML format preserves DAG information in the spec.
"""
+import spack.util.spack_json as sjson
+import spack.util.spack_yaml as syaml
from spack.spec import Spec
-from spack.test.mock_packages_test import *
+from spack.util.spack_yaml import syaml_dict
-class SpecDagTest(MockPackagesTest):
- def check_yaml_round_trip(self, spec):
- yaml_text = spec.to_yaml()
- spec_from_yaml = Spec.from_yaml(yaml_text)
- self.assertTrue(spec.eq_dag(spec_from_yaml))
+def check_yaml_round_trip(spec):
+ yaml_text = spec.to_yaml()
+ spec_from_yaml = Spec.from_yaml(yaml_text)
+ assert spec.eq_dag(spec_from_yaml)
- def test_simple_spec(self):
- spec = Spec('mpileaks')
- self.check_yaml_round_trip(spec)
+def test_simple_spec():
+ spec = Spec('mpileaks')
+ check_yaml_round_trip(spec)
- def test_normal_spec(self):
- spec = Spec('mpileaks+debug~opt')
- spec.normalize()
- self.check_yaml_round_trip(spec)
+def test_normal_spec(builtin_mock):
+ spec = Spec('mpileaks+debug~opt')
+ spec.normalize()
+ check_yaml_round_trip(spec)
- def test_ambiguous_version_spec(self):
- spec = Spec('mpileaks@1.0:5.0,6.1,7.3+debug~opt')
- spec.normalize()
- self.check_yaml_round_trip(spec)
+def test_ambiguous_version_spec(builtin_mock):
+ spec = Spec('mpileaks@1.0:5.0,6.1,7.3+debug~opt')
+ spec.normalize()
+ check_yaml_round_trip(spec)
- def test_concrete_spec(self):
- spec = Spec('mpileaks+debug~opt')
- spec.concretize()
- self.check_yaml_round_trip(spec)
+def test_concrete_spec(config, builtin_mock):
+ spec = Spec('mpileaks+debug~opt')
+ spec.concretize()
+ check_yaml_round_trip(spec)
- def test_yaml_subdag(self):
- spec = Spec('mpileaks^mpich+debug')
- spec.concretize()
+def test_yaml_subdag(config, builtin_mock):
+ spec = Spec('mpileaks^mpich+debug')
+ spec.concretize()
+ yaml_spec = Spec.from_yaml(spec.to_yaml())
+
+ for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
+ assert spec[dep].eq_dag(yaml_spec[dep])
+
+
+def test_using_ordered_dict(builtin_mock):
+ """ Checks that dicts are ordered
- yaml_spec = Spec.from_yaml(spec.to_yaml())
+ Necessary to make sure that dag_hash is stable across python
+ versions and processes.
+ """
+ def descend_and_check(iterable, level=0):
+ from spack.util.spack_yaml import syaml_dict
+ from collections import Iterable, Mapping
+ if isinstance(iterable, Mapping):
+ assert isinstance(iterable, syaml_dict)
+ return descend_and_check(iterable.values(), level=level + 1)
+ max_level = level
+ for value in iterable:
+ if isinstance(value, Iterable) and not isinstance(value, str):
+ nlevel = descend_and_check(value, level=level + 1)
+ if nlevel > max_level:
+ max_level = nlevel
+ return max_level
+
+ specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
+ for spec in specs:
+ dag = Spec(spec)
+ dag.normalize()
+ level = descend_and_check(dag.to_node_dict())
+ # level just makes sure we are doing something here
+ assert level >= 5
+
+
+def test_ordered_read_not_required_for_consistent_dag_hash(
+ config, builtin_mock
+):
+ """Make sure ordered serialization isn't required to preserve hashes.
+
+ For consistent hashes, we require that YAML and json documents
+ have their keys serialized in a deterministic order. However, we
+ don't want to require them to be serialized in order. This
+ ensures that is not required.
+ """
+ specs = ['mpileaks ^zmpi', 'dttop', 'dtuse']
+ for spec in specs:
+ spec = Spec(spec)
+ spec.concretize()
- for dep in ('callpath', 'mpich', 'dyninst', 'libdwarf', 'libelf'):
- self.assertTrue(spec[dep].eq_dag(yaml_spec[dep]))
+ #
+ # Dict & corresponding YAML & JSON from the original spec.
+ #
+ spec_dict = spec.to_dict()
+ spec_yaml = spec.to_yaml()
+ spec_json = spec.to_json()
+
+ #
+ # Make a spec with reversed OrderedDicts for every
+ # OrderedDict in the original.
+ #
+ reversed_spec_dict = reverse_all_dicts(spec.to_dict())
+
+ #
+ # Dump to YAML and JSON
+ #
+ yaml_string = syaml.dump(spec_dict, default_flow_style=False)
+ reversed_yaml_string = syaml.dump(reversed_spec_dict,
+ default_flow_style=False)
+ json_string = sjson.dump(spec_dict)
+ reversed_json_string = sjson.dump(reversed_spec_dict)
+
+ #
+ # Do many consistency checks
+ #
+
+ # spec yaml is ordered like the spec dict
+ assert yaml_string == spec_yaml
+ assert json_string == spec_json
+
+ # reversed string is different from the original, so it
+ # *would* generate a different hash
+ assert yaml_string != reversed_yaml_string
+ assert json_string != reversed_json_string
+
+ # build specs from the "wrongly" ordered data
+ round_trip_yaml_spec = Spec.from_yaml(yaml_string)
+ round_trip_json_spec = Spec.from_json(json_string)
+ round_trip_reversed_yaml_spec = Spec.from_yaml(
+ reversed_yaml_string
+ )
+ round_trip_reversed_json_spec = Spec.from_yaml(
+ reversed_json_string
+ )
+
+ # TODO: remove this when build deps are in provenance.
+ spec = spec.copy(deps=('link', 'run'))
+ # specs are equal to the original
+ assert spec == round_trip_yaml_spec
+ assert spec == round_trip_json_spec
+ assert spec == round_trip_reversed_yaml_spec
+ assert spec == round_trip_reversed_json_spec
+ assert round_trip_yaml_spec == round_trip_reversed_yaml_spec
+ assert round_trip_json_spec == round_trip_reversed_json_spec
+ # dag_hashes are equal
+ assert spec.dag_hash() == round_trip_yaml_spec.dag_hash()
+ assert spec.dag_hash() == round_trip_json_spec.dag_hash()
+ assert spec.dag_hash() == round_trip_reversed_yaml_spec.dag_hash()
+ assert spec.dag_hash() == round_trip_reversed_json_spec.dag_hash()
+
+
+def reverse_all_dicts(data):
+ """Descend into data and reverse all the dictionaries"""
+ if isinstance(data, dict):
+ return syaml_dict(reversed(
+ [(reverse_all_dicts(k), reverse_all_dicts(v))
+ for k, v in data.items()]))
+ elif isinstance(data, (list, tuple)):
+ return type(data)(reverse_all_dicts(elt) for elt in data)
+ else:
+ return data
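
reverse_all_dicts above relies on Spack's syaml_dict being order-preserving. A standalone analogue using collections.OrderedDict (a stand-in for syaml_dict, assumed here only for illustration) shows the key-order reversal the hash-consistency test depends on:

from collections import OrderedDict


def reverse_keys(data):
    """Recursively reverse the key order of every mapping in `data`."""
    if isinstance(data, dict):
        return OrderedDict(reversed([(k, reverse_keys(v))
                                     for k, v in data.items()]))
    elif isinstance(data, (list, tuple)):
        return type(data)(reverse_keys(elt) for elt in data)
    return data


d = OrderedDict([('a', 1), ('b', OrderedDict([('x', 2), ('y', 3)]))])
print(list(reverse_keys(d)))       # ['b', 'a']
print(list(reverse_keys(d)['b']))  # ['y', 'x']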
diff --git a/lib/spack/spack/test/stage.py b/lib/spack/spack/test/stage.py
index 6d8c3ac67c..5b4c46e0bf 100644
--- a/lib/spack/spack/test/stage.py
+++ b/lib/spack/spack/test/stage.py
@@ -22,293 +22,360 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""\
-Test that the Stage class works correctly.
-"""
+"""Test that the Stage class works correctly."""
+import collections
import os
-import shutil
-import unittest
-from contextlib import *
+import pytest
import spack
-from llnl.util.filesystem import *
+import spack.stage
+import spack.util.executable
+from llnl.util.filesystem import join_path
from spack.stage import Stage
-from spack.util.executable import which
-test_files_dir = join_path(spack.stage_path, '.test')
-test_tmp_path = join_path(test_files_dir, 'tmp')
-archive_dir = 'test-files'
-archive_name = archive_dir + '.tar.gz'
-archive_dir_path = join_path(test_files_dir, archive_dir)
-archive_url = 'file://' + join_path(test_files_dir, archive_name)
-readme_name = 'README.txt'
-test_readme = join_path(archive_dir_path, readme_name)
-readme_text = "hello world!\n"
+def check_chdir_to_source(stage, stage_name):
+ stage_path = get_stage_path(stage, stage_name)
+ archive_dir = 'test-files'
+ assert join_path(
+ os.path.realpath(stage_path), archive_dir
+ ) == os.getcwd()
-stage_name = 'spack-test-stage'
+def check_expand_archive(stage, stage_name, mock_archive):
+ stage_path = get_stage_path(stage, stage_name)
+ archive_name = 'test-files.tar.gz'
+ archive_dir = 'test-files'
+ assert archive_name in os.listdir(stage_path)
+ assert archive_dir in os.listdir(stage_path)
-@contextmanager
-def use_tmp(use_tmp):
- """Allow some test code to be executed with spack.use_tmp_stage
- set to a certain value. Context manager makes sure it's reset
- on failure.
- """
- old_tmp = spack.use_tmp_stage
- spack.use_tmp_stage = use_tmp
- yield
- spack.use_tmp_stage = old_tmp
-
-
-class StageTest(unittest.TestCase):
- def setUp(self):
- """This sets up a mock archive to fetch, and a mock temp space for use
- by the Stage class. It doesn't actually create the Stage -- that
- is done by individual tests.
- """
- if os.path.exists(test_files_dir):
- shutil.rmtree(test_files_dir)
-
- mkdirp(test_files_dir)
- mkdirp(archive_dir_path)
- mkdirp(test_tmp_path)
-
- with open(test_readme, 'w') as readme:
- readme.write(readme_text)
-
- with working_dir(test_files_dir):
- tar = which('tar')
- tar('czf', archive_name, archive_dir)
-
- # Make spack use the test environment for tmp stuff.
- self.old_tmp_dirs = spack.tmp_dirs
- spack.tmp_dirs = [test_tmp_path]
-
- # record this since this test changes to directories that will
- # be removed.
- self.working_dir = os.getcwd()
-
-
- def tearDown(self):
- """Blows away the test environment directory."""
- shutil.rmtree(test_files_dir)
-
- # chdir back to original working dir
- os.chdir(self.working_dir)
-
- # restore spack's original tmp environment
- spack.tmp_dirs = self.old_tmp_dirs
-
-
- def get_stage_path(self, stage, stage_name):
- """Figure out where a stage should be living. This depends on
- whether it's named.
- """
- if stage_name is not None:
- # If it is a named stage, we know where the stage should be
- return join_path(spack.stage_path, stage_name)
- else:
- # If it's unnamed, ensure that we ran mkdtemp in the right spot.
- self.assertTrue(stage.path is not None)
- self.assertTrue(stage.path.startswith(spack.stage_path))
- return stage.path
-
-
- def check_setup(self, stage, stage_name):
- """Figure out whether a stage was set up correctly."""
- stage_path = self.get_stage_path(stage, stage_name)
-
- # Ensure stage was created in the spack stage directory
- self.assertTrue(os.path.isdir(stage_path))
-
- if spack.use_tmp_stage:
- # Check that the stage dir is really a symlink.
- self.assertTrue(os.path.islink(stage_path))
-
- # Make sure it points to a valid directory
- target = os.path.realpath(stage_path)
- self.assertTrue(os.path.isdir(target))
- self.assertFalse(os.path.islink(target))
-
- # Make sure the directory is in the place we asked it to
- # be (see setUp and tearDown)
- self.assertTrue(target.startswith(test_tmp_path))
-
- else:
- # Make sure the stage path is NOT a link for a non-tmp stage
- self.assertFalse(os.path.islink(stage_path))
-
-
- def check_fetch(self, stage, stage_name):
- stage_path = self.get_stage_path(stage, stage_name)
- self.assertTrue(archive_name in os.listdir(stage_path))
- self.assertEqual(join_path(stage_path, archive_name),
- stage.fetcher.archive_file)
+ assert join_path(stage_path, archive_dir) == stage.source_path
+ readme = join_path(stage_path, archive_dir, 'README.txt')
+ assert os.path.isfile(readme)
+ with open(readme) as file:
+ assert 'hello world!\n' == file.read()
- def check_expand_archive(self, stage, stage_name):
- stage_path = self.get_stage_path(stage, stage_name)
- self.assertTrue(archive_name in os.listdir(stage_path))
- self.assertTrue(archive_dir in os.listdir(stage_path))
- self.assertEqual(
- join_path(stage_path, archive_dir),
- stage.source_path)
+def check_fetch(stage, stage_name):
+ archive_name = 'test-files.tar.gz'
+ stage_path = get_stage_path(stage, stage_name)
+ assert archive_name in os.listdir(stage_path)
+ assert join_path(stage_path, archive_name) == stage.fetcher.archive_file
- readme = join_path(stage_path, archive_dir, readme_name)
- self.assertTrue(os.path.isfile(readme))
- with open(readme) as file:
- self.assertEqual(readme_text, file.read())
+def check_chdir(stage, stage_name):
+ stage_path = get_stage_path(stage, stage_name)
+ assert os.path.realpath(stage_path) == os.getcwd()
- def check_chdir(self, stage, stage_name):
- stage_path = self.get_stage_path(stage, stage_name)
- self.assertEqual(os.path.realpath(stage_path), os.getcwd())
+def check_destroy(stage, stage_name):
+ """Figure out whether a stage was destroyed correctly."""
+ stage_path = get_stage_path(stage, stage_name)
+ # check that the stage dir/link was removed.
+ assert not os.path.exists(stage_path)
- def check_chdir_to_source(self, stage, stage_name):
- stage_path = self.get_stage_path(stage, stage_name)
- self.assertEqual(
- join_path(os.path.realpath(stage_path), archive_dir),
- os.getcwd())
+ # tmp stage needs to remove tmp dir too.
+ if spack.stage._use_tmp_stage:
+ target = os.path.realpath(stage_path)
+ assert not os.path.exists(target)
- def check_destroy(self, stage, stage_name):
- """Figure out whether a stage was destroyed correctly."""
- stage_path = self.get_stage_path(stage, stage_name)
+def check_setup(stage, stage_name, archive):
+ """Figure out whether a stage was set up correctly."""
+ stage_path = get_stage_path(stage, stage_name)
- # check that the stage dir/link was removed.
- self.assertFalse(os.path.exists(stage_path))
+ # Ensure stage was created in the spack stage directory
+ assert os.path.isdir(stage_path)
- # tmp stage needs to remove tmp dir too.
- if spack.use_tmp_stage:
- target = os.path.realpath(stage_path)
- self.assertFalse(os.path.exists(target))
+ if spack.stage.get_tmp_root():
+ # Check that the stage dir is really a symlink.
+ assert os.path.islink(stage_path)
+ # Make sure it points to a valid directory
+ target = os.path.realpath(stage_path)
+ assert os.path.isdir(target)
+ assert not os.path.islink(target)
- def test_setup_and_destroy_name_with_tmp(self):
- with use_tmp(True):
- with Stage(archive_url, name=stage_name) as stage:
- self.check_setup(stage, stage_name)
- self.check_destroy(stage, stage_name)
+ # Make sure the directory is in the place we asked it to
+ # be (see the mock_archive and tmpdir_for_stage fixtures)
+ assert target.startswith(str(archive.test_tmp_dir))
+ else:
+ # Make sure the stage path is NOT a link for a non-tmp stage
+ assert not os.path.islink(stage_path)
- def test_setup_and_destroy_name_without_tmp(self):
- with use_tmp(False):
- with Stage(archive_url, name=stage_name) as stage:
- self.check_setup(stage, stage_name)
- self.check_destroy(stage, stage_name)
-
- def test_setup_and_destroy_no_name_with_tmp(self):
- with use_tmp(True):
- with Stage(archive_url) as stage:
- self.check_setup(stage, None)
- self.check_destroy(stage, None)
-
-
- def test_setup_and_destroy_no_name_without_tmp(self):
- with use_tmp(False):
- with Stage(archive_url) as stage:
- self.check_setup(stage, None)
- self.check_destroy(stage, None)
-
-
- def test_chdir(self):
- with Stage(archive_url, name=stage_name) as stage:
+def get_stage_path(stage, stage_name):
+ """Figure out where a stage should be living. This depends on
+ whether it's named.
+ """
+ if stage_name is not None:
+ # If it is a named stage, we know where the stage should be
+ return join_path(spack.stage_path, stage_name)
+ else:
+ # If it's unnamed, ensure that we ran mkdtemp in the right spot.
+ assert stage.path is not None
+ assert stage.path.startswith(spack.stage_path)
+ return stage.path
+
+
+@pytest.fixture()
+def tmpdir_for_stage(mock_archive):
+ """Uses a temporary directory for staging"""
+ current = spack.stage_path
+ spack.config.update_config(
+ 'config',
+ {'build_stage': [str(mock_archive.test_tmp_dir)]},
+ scope='user'
+ )
+ yield
+ spack.config.update_config(
+ 'config', {'build_stage': [current]}, scope='user'
+ )
+
+
+@pytest.fixture()
+def mock_archive(tmpdir, monkeypatch):
+ """Creates a mock archive with the structure expected by the tests"""
+ # Mock up a stage area that looks like this:
+ #
+ # TMPDIR/ test_files_dir
+ # tmp/ test_tmp_path (where stage should be)
+ # test-files/ archive_dir_path
+ # README.txt test_readme (contains "hello world!\n")
+ # test-files.tar.gz archive_url = file:///path/to/this
+ #
+ test_tmp_path = tmpdir.join('tmp')
+ # set test_tmp_path as the default test directory to use for stages.
+ spack.config.update_config(
+ 'config', {'build_stage': [str(test_tmp_path)]}, scope='user'
+ )
+
+ archive_dir = tmpdir.join('test-files')
+ archive_name = 'test-files.tar.gz'
+ archive = tmpdir.join(archive_name)
+ archive_url = 'file://' + str(archive)
+ test_readme = archive_dir.join('README.txt')
+ archive_dir.ensure(dir=True)
+ test_tmp_path.ensure(dir=True)
+ test_readme.write('hello world!\n')
+
+ current = tmpdir.chdir()
+ tar = spack.util.executable.which('tar', required=True)
+ tar('czf', str(archive_name), 'test-files')
+ current.chdir()
+
+ # Make spack use the test environment for tmp stuff.
+ monkeypatch.setattr(spack.stage, '_tmp_root', None)
+ monkeypatch.setattr(spack.stage, '_use_tmp_stage', True)
+
+ Archive = collections.namedtuple(
+ 'Archive', ['url', 'tmpdir', 'test_tmp_dir', 'archive_dir']
+ )
+ yield Archive(
+ url=archive_url,
+ tmpdir=tmpdir,
+ test_tmp_dir=test_tmp_path,
+ archive_dir=archive_dir
+ )
+ # record this since this test changes to directories that will
+ # be removed.
+ current.chdir()
+
+
+@pytest.fixture()
+def failing_search_fn():
+ """Returns a search function that fails! Always!"""
+ def _mock():
+ raise Exception("This should not have been called")
+ return _mock
+
+
+@pytest.fixture()
+def failing_fetch_strategy():
+ """Returns a fetch strategy that fails."""
+ class FailingFetchStrategy(spack.fetch_strategy.FetchStrategy):
+ def fetch(self):
+ raise spack.fetch_strategy.FailedDownloadError(
+ "<non-existent URL>",
+ "This implementation of FetchStrategy always fails"
+ )
+ return FailingFetchStrategy()
+
+
+@pytest.fixture()
+def search_fn():
+ """Returns a search function that always succeeds."""
+ class _Mock(object):
+ performed_search = False
+
+ def __call__(self):
+ self.performed_search = True
+ return []
+
+ return _Mock()
+
+
+@pytest.mark.usefixtures('builtin_mock')
+class TestStage(object):
+
+ stage_name = 'spack-test-stage'
+
+ @pytest.mark.usefixtures('tmpdir_for_stage')
+ def test_setup_and_destroy_name_with_tmp(self, mock_archive):
+ with Stage(mock_archive.url, name=self.stage_name) as stage:
+ check_setup(stage, self.stage_name, mock_archive)
+ check_destroy(stage, self.stage_name)
+
+ def test_setup_and_destroy_name_without_tmp(self, mock_archive):
+ with Stage(mock_archive.url, name=self.stage_name) as stage:
+ check_setup(stage, self.stage_name, mock_archive)
+ check_destroy(stage, self.stage_name)
+
+ @pytest.mark.usefixtures('tmpdir_for_stage')
+ def test_setup_and_destroy_no_name_with_tmp(self, mock_archive):
+ with Stage(mock_archive.url) as stage:
+ check_setup(stage, None, mock_archive)
+ check_destroy(stage, None)
+
+ def test_setup_and_destroy_no_name_without_tmp(self, mock_archive):
+ with Stage(mock_archive.url) as stage:
+ check_setup(stage, None, mock_archive)
+ check_destroy(stage, None)
+
+ def test_chdir(self, mock_archive):
+ with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.chdir()
- self.check_setup(stage, stage_name)
- self.check_chdir(stage, stage_name)
- self.check_destroy(stage, stage_name)
-
+ check_setup(stage, self.stage_name, mock_archive)
+ check_chdir(stage, self.stage_name)
+ check_destroy(stage, self.stage_name)
- def test_fetch(self):
- with Stage(archive_url, name=stage_name) as stage:
+ def test_fetch(self, mock_archive):
+ with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.fetch()
- self.check_setup(stage, stage_name)
- self.check_chdir(stage, stage_name)
- self.check_fetch(stage, stage_name)
- self.check_destroy(stage, stage_name)
-
-
- def test_expand_archive(self):
- with Stage(archive_url, name=stage_name) as stage:
+ check_setup(stage, self.stage_name, mock_archive)
+ check_chdir(stage, self.stage_name)
+ check_fetch(stage, self.stage_name)
+ check_destroy(stage, self.stage_name)
+
+ def test_no_search_if_default_succeeds(
+ self, mock_archive, failing_search_fn
+ ):
+ with Stage(
+ mock_archive.url,
+ name=self.stage_name,
+ search_fn=failing_search_fn
+ ) as stage:
+ stage.fetch()
+ check_destroy(stage, self.stage_name)
+
+ def test_no_search_mirror_only(
+ self, failing_fetch_strategy, failing_search_fn
+ ):
+ with Stage(
+ failing_fetch_strategy,
+ name=self.stage_name,
+ search_fn=failing_search_fn
+ ) as stage:
+ try:
+ stage.fetch(mirror_only=True)
+ except spack.fetch_strategy.FetchError:
+ pass
+ check_destroy(stage, self.stage_name)
+
+ def test_search_if_default_fails(self, failing_fetch_strategy, search_fn):
+ with Stage(
+ failing_fetch_strategy,
+ name=self.stage_name,
+ search_fn=search_fn
+ ) as stage:
+ try:
+ stage.fetch(mirror_only=False)
+ except spack.fetch_strategy.FetchError:
+ pass
+ check_destroy(stage, self.stage_name)
+ assert search_fn.performed_search
+
+ def test_expand_archive(self, mock_archive):
+ with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.fetch()
- self.check_setup(stage, stage_name)
- self.check_fetch(stage, stage_name)
+ check_setup(stage, self.stage_name, mock_archive)
+ check_fetch(stage, self.stage_name)
stage.expand_archive()
- self.check_expand_archive(stage, stage_name)
- self.check_destroy(stage, stage_name)
-
+ check_expand_archive(stage, self.stage_name, mock_archive)
+ check_destroy(stage, self.stage_name)
- def test_expand_archive(self):
- with Stage(archive_url, name=stage_name) as stage:
+ def test_expand_archive_with_chdir(self, mock_archive):
+ with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.fetch()
- self.check_setup(stage, stage_name)
- self.check_fetch(stage, stage_name)
+ check_setup(stage, self.stage_name, mock_archive)
+ check_fetch(stage, self.stage_name)
stage.expand_archive()
stage.chdir_to_source()
- self.check_expand_archive(stage, stage_name)
- self.check_chdir_to_source(stage, stage_name)
- self.check_destroy(stage, stage_name)
+ check_expand_archive(stage, self.stage_name, mock_archive)
+ check_chdir_to_source(stage, self.stage_name)
+ check_destroy(stage, self.stage_name)
-
- def test_restage(self):
- with Stage(archive_url, name=stage_name) as stage:
+ def test_restage(self, mock_archive):
+ with Stage(mock_archive.url, name=self.stage_name) as stage:
stage.fetch()
stage.expand_archive()
stage.chdir_to_source()
- self.check_expand_archive(stage, stage_name)
- self.check_chdir_to_source(stage, stage_name)
+ check_expand_archive(stage, self.stage_name, mock_archive)
+ check_chdir_to_source(stage, self.stage_name)
# Try to make a file in the old archive dir
with open('foobar', 'w') as file:
file.write("this file is to be destroyed.")
- self.assertTrue('foobar' in os.listdir(stage.source_path))
+ assert 'foobar' in os.listdir(stage.source_path)
# Make sure the file is not there after restage.
stage.restage()
- self.check_chdir(stage, stage_name)
- self.check_fetch(stage, stage_name)
+ check_chdir(stage, self.stage_name)
+ check_fetch(stage, self.stage_name)
stage.chdir_to_source()
- self.check_chdir_to_source(stage, stage_name)
- self.assertFalse('foobar' in os.listdir(stage.source_path))
- self.check_destroy(stage, stage_name)
-
-
- def test_no_keep_without_exceptions(self):
- with Stage(archive_url, name=stage_name, keep=False) as stage:
+ check_chdir_to_source(stage, self.stage_name)
+ assert 'foobar' not in os.listdir(stage.source_path)
+ check_destroy(stage, self.stage_name)
+
+ def test_no_keep_without_exceptions(self, mock_archive):
+ with Stage(
+ mock_archive.url, name=self.stage_name, keep=False
+ ) as stage:
pass
- self.check_destroy(stage, stage_name)
+ check_destroy(stage, self.stage_name)
-
- def test_keep_without_exceptions(self):
- with Stage(archive_url, name=stage_name, keep=True) as stage:
+ def test_keep_without_exceptions(self, mock_archive):
+ with Stage(
+ mock_archive.url, name=self.stage_name, keep=True
+ ) as stage:
pass
- path = self.get_stage_path(stage, stage_name)
- self.assertTrue(os.path.isdir(path))
-
+ path = get_stage_path(stage, self.stage_name)
+ assert os.path.isdir(path)
- def test_no_keep_with_exceptions(self):
+ def test_no_keep_with_exceptions(self, mock_archive):
+ class ThisMustFailHere(Exception):
+ pass
try:
- with Stage(archive_url, name=stage_name, keep=False) as stage:
- raise Exception()
-
- path = self.get_stage_path(stage, stage_name)
- self.assertTrue(os.path.isdir(path))
- except:
- pass # ignore here.
-
-
- def test_keep_exceptions(self):
+ with Stage(
+ mock_archive.url, name=self.stage_name, keep=False
+ ) as stage:
+ raise ThisMustFailHere()
+ except ThisMustFailHere:
+ path = get_stage_path(stage, self.stage_name)
+ assert os.path.isdir(path)
+
+ def test_keep_exceptions(self, mock_archive):
+ class ThisMustFailHere(Exception):
+ pass
try:
- with Stage(archive_url, name=stage_name, keep=True) as stage:
- raise Exception()
-
- path = self.get_stage_path(stage, stage_name)
- self.assertTrue(os.path.isdir(path))
- except:
- pass # ignore here.
+ with Stage(
+ mock_archive.url, name=self.stage_name, keep=True
+ ) as stage:
+ raise ThisMustFailHere()
+ except ThisMustFailHere:
+ path = get_stage_path(stage, self.stage_name)
+ assert os.path.isdir(path)
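
The stage tests above replace setUp/tearDown and the use_tmp context manager with pytest fixtures. A generic sketch (not Spack code) of the yield-fixture pattern being used: everything before the yield is setup, everything after it runs as teardown once the test finishes.

import pytest


@pytest.fixture()
def scratch_file(tmpdir):
    """Create a throwaway file for a test, then clean it up."""
    path = tmpdir.join('scratch.txt')
    path.write('hello world!\n')   # setup
    yield path                     # the test receives this value
    if path.check():               # teardown
        path.remove()


def test_scratch_file(scratch_file):
    assert scratch_file.read() == 'hello world!\n'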
diff --git a/lib/spack/spack/test/svn_fetch.py b/lib/spack/spack/test/svn_fetch.py
index 0a745a090b..962a150909 100644
--- a/lib/spack/spack/test/svn_fetch.py
+++ b/lib/spack/spack/test/svn_fetch.py
@@ -23,88 +23,62 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import re
-import spack
-from spack.test.mock_repo import svn, MockSvnRepo
-from spack.version import ver
-from spack.test.mock_packages_test import *
+import pytest
+import spack
from llnl.util.filesystem import *
+from spack.spec import Spec
+from spack.version import ver
-class SvnFetchTest(MockPackagesTest):
- """Tests fetching from a dummy git repository."""
-
- def setUp(self):
- """Create an svn repository with two revisions."""
- super(SvnFetchTest, self).setUp()
-
- self.repo = MockSvnRepo()
-
- spec = Spec('svn-test')
- spec.concretize()
- self.pkg = spack.repo.get(spec, new=True)
-
- def tearDown(self):
- """Destroy the stage space used by this test."""
- super(SvnFetchTest, self).tearDown()
- self.repo.destroy()
-
- def assert_rev(self, rev):
- """Check that the current revision is equal to the supplied rev."""
- def get_rev():
- output = svn('info', output=str)
- self.assertTrue("Revision" in output)
- for line in output.split('\n'):
- match = re.match(r'Revision: (\d+)', line)
- if match:
- return match.group(1)
- self.assertEqual(get_rev(), rev)
-
- def try_fetch(self, rev, test_file, args):
- """Tries to:
- 1. Fetch the repo using a fetch strategy constructed with
- supplied args.
- 2. Check if the test_file is in the checked out repository.
- 3. Assert that the repository is at the revision supplied.
- 4. Add and remove some files, then reset the repo, and
- ensure it's all there again.
- """
- self.pkg.versions[ver('svn')] = args
-
- with self.pkg.stage:
- self.pkg.do_stage()
- self.assert_rev(rev)
-
- file_path = join_path(self.pkg.stage.source_path, test_file)
- self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
- self.assertTrue(os.path.isfile(file_path))
-
- os.unlink(file_path)
- self.assertFalse(os.path.isfile(file_path))
-
- untracked = 'foobarbaz'
- touch(untracked)
- self.assertTrue(os.path.isfile(untracked))
- self.pkg.do_restage()
- self.assertFalse(os.path.isfile(untracked))
-
- self.assertTrue(os.path.isdir(self.pkg.stage.source_path))
- self.assertTrue(os.path.isfile(file_path))
-
- self.assert_rev(rev)
-
-
- def test_fetch_default(self):
- """Test a default checkout and make sure it's on rev 1"""
- self.try_fetch(self.repo.r1, self.repo.r1_file, {
- 'svn' : self.repo.url
- })
-
-
- def test_fetch_r1(self):
- """Test fetching an older revision (0)."""
- self.try_fetch(self.repo.r0, self.repo.r0_file, {
- 'svn' : self.repo.url,
- 'revision' : self.repo.r0
- })
+@pytest.fixture(params=['default', 'rev0'])
+def type_of_test(request):
+ """Returns one of the test types available for the mock_svn_repository"""
+ return request.param
+
+
+def test_fetch(
+ type_of_test,
+ mock_svn_repository,
+ config,
+ refresh_builtin_mock
+):
+ """Tries to:
+
+ 1. Fetch the repo using a fetch strategy constructed with
+ supplied args (they depend on type_of_test).
+ 2. Check if the test_file is in the checked out repository.
+ 3. Assert that the repository is at the revision supplied.
+ 4. Add and remove some files, then reset the repo, and
+ ensure it's all there again.
+ """
+ # Retrieve the right test parameters
+ t = mock_svn_repository.checks[type_of_test]
+ h = mock_svn_repository.hash
+ # Construct the package under test
+ spec = Spec('svn-test')
+ spec.concretize()
+ pkg = spack.repo.get(spec, new=True)
+ pkg.versions[ver('svn')] = t.args
+ # Enter the stage directory and check some properties
+ with pkg.stage:
+ pkg.do_stage()
+ assert h() == t.revision
+
+ file_path = join_path(pkg.stage.source_path, t.file)
+ assert os.path.isdir(pkg.stage.source_path)
+ assert os.path.isfile(file_path)
+
+ os.unlink(file_path)
+ assert not os.path.isfile(file_path)
+
+ untracked_file = 'foobarbaz'
+ touch(untracked_file)
+ assert os.path.isfile(untracked_file)
+ pkg.do_restage()
+ assert not os.path.isfile(untracked_file)
+
+ assert os.path.isdir(pkg.stage.source_path)
+ assert os.path.isfile(file_path)
+
+ assert h() == t.revision
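
For readers unfamiliar with the pytest idiom used above, here is a self-contained sketch of a parametrized fixture driving one run of a test body per parameter; the names are illustrative, not Spack's actual fixtures.

import pytest


@pytest.fixture(params=['default', 'rev0'])
def type_of_test(request):
    # pytest re-runs every test that uses this fixture once per value.
    return request.param


def test_runs_once_per_param(type_of_test):
    assert type_of_test in ('default', 'rev0')
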
diff --git a/lib/spack/spack/test/url_extrapolate.py b/lib/spack/spack/test/url_extrapolate.py
index ffd4230f71..5f5cf555ae 100644
--- a/lib/spack/spack/test/url_extrapolate.py
+++ b/lib/spack/spack/test/url_extrapolate.py
@@ -22,11 +22,12 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-"""\
-Tests ability of spack to extrapolate URL versions from existing versions.
+"""Tests ability of spack to extrapolate URL versions from
+existing versions.
"""
+import unittest
+
import spack.url as url
-from spack.test.mock_packages_test import *
class UrlExtrapolateTest(unittest.TestCase):
@@ -34,20 +35,21 @@ class UrlExtrapolateTest(unittest.TestCase):
def check_url(self, base, version, new_url):
self.assertEqual(url.substitute_version(base, version), new_url)
-
def test_libelf_version(self):
base = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
self.check_url(base, '0.8.13', base)
- self.check_url(base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
- self.check_url(base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
- self.check_url(base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")
-
+ self.check_url(
+ base, '0.8.12', "http://www.mr511.de/software/libelf-0.8.12.tar.gz")
+ self.check_url(
+ base, '0.3.1', "http://www.mr511.de/software/libelf-0.3.1.tar.gz")
+ self.check_url(
+ base, '1.3.1b', "http://www.mr511.de/software/libelf-1.3.1b.tar.gz")
def test_libdwarf_version(self):
base = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
self.check_url(base, '20130729', base)
- self.check_url(base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")
-
+ self.check_url(
+ base, '8.12', "http://www.prevanders.net/libdwarf-8.12.tar.gz")
def test_dyninst_version(self):
# Dyninst has a version twice in the URL.
@@ -58,7 +60,6 @@ class UrlExtrapolateTest(unittest.TestCase):
self.check_url(base, '8.3.1',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.3.1/DyninstAPI-8.3.1.tgz")
-
def test_partial_version_prefix(self):
# Test now with a partial prefix earlier in the URL -- this is
# hard to figure out so Spack only substitutes the last
@@ -72,7 +73,6 @@ class UrlExtrapolateTest(unittest.TestCase):
self.check_url(base, '8.3.1',
"http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1/DyninstAPI-8.3.1.tgz")
-
def test_scalasca_partial_version(self):
# Note that this probably doesn't actually work, but sites are
# inconsistent about their directory structure, so it's not
@@ -84,19 +84,16 @@ class UrlExtrapolateTest(unittest.TestCase):
self.check_url('http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3-TP1.tar.gz', '8.3.1',
'http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-8.3.1.tar.gz')
-
def test_mpileaks_version(self):
self.check_url('https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz', '2.1.3',
'https://github.com/hpc/mpileaks/releases/download/v2.1.3/mpileaks-2.1.3.tar.gz')
-
def test_gcc(self):
self.check_url('http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2', '4.7',
'http://open-source-box.org/gcc/gcc-4.7/gcc-4.7.tar.bz2')
self.check_url('http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2', '4.4.7',
'http://open-source-box.org/gcc/gcc-4.4.7/gcc-4.4.7.tar.bz2')
-
def test_github_raw(self):
self.check_url('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true', '2.0.7',
'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
diff --git a/lib/spack/spack/test/url_parse.py b/lib/spack/spack/test/url_parse.py
index 648996aaaa..c4718d56b8 100644
--- a/lib/spack/spack/test/url_parse.py
+++ b/lib/spack/spack/test/url_parse.py
@@ -32,11 +32,11 @@ import spack.url as url
class UrlParseTest(unittest.TestCase):
+
def assert_not_detected(self, string):
self.assertRaises(
url.UndetectableVersionError, url.parse_name_and_version, string)
-
def check(self, name, v, string, **kwargs):
# Make sure correct name and version are extracted.
parsed_name, parsed_v = url.parse_name_and_version(string)
@@ -52,7 +52,6 @@ class UrlParseTest(unittest.TestCase):
# build one with a specific version.
self.assertEqual(string, url.substitute_version(string, v))
-
def test_wwwoffle_version(self):
self.check(
'wwwoffle', '2.9h',
@@ -60,10 +59,10 @@ class UrlParseTest(unittest.TestCase):
def test_version_sourceforge_download(self):
self.check(
- 'foo_bar', '1.21',
+ 'foo-bar', '1.21',
'http://sourceforge.net/foo_bar-1.21.tar.gz/download')
self.check(
- 'foo_bar', '1.21',
+ 'foo-bar', '1.21',
'http://sf.net/foo_bar-1.21.tar.gz/download')
def test_no_version(self):
@@ -72,7 +71,7 @@ class UrlParseTest(unittest.TestCase):
def test_version_all_dots(self):
self.check(
- 'foo.bar.la', '1.14','http://example.com/foo.bar.la.1.14.zip')
+ 'foo-bar-la', '1.14', 'http://example.com/foo.bar.la.1.14.zip')
def test_version_underscore_separator(self):
self.check(
@@ -137,12 +136,12 @@ class UrlParseTest(unittest.TestCase):
def test_version_single_digit(self):
self.check(
- 'foo_bar', '45',
+ 'foo-bar', '45',
'http://example.com/foo_bar.45.tar.gz')
def test_noseparator_single_digit(self):
self.check(
- 'foo_bar', '45',
+ 'foo-bar', '45',
'http://example.com/foo_bar45.tar.gz')
def test_version_developer_that_hates_us_format(self):
@@ -152,9 +151,23 @@ class UrlParseTest(unittest.TestCase):
def test_version_regular(self):
self.check(
- 'foo_bar', '1.21',
+ 'foo-bar', '1.21',
'http://example.com/foo_bar-1.21.tar.gz')
+ def test_version_gitlab(self):
+ self.check(
+ 'vtk', '7.0.0',
+ 'https://gitlab.kitware.com/vtk/vtk/repository/'
+ 'archive.tar.bz2?ref=v7.0.0')
+ self.check(
+ 'icet', '1.2.3',
+ 'https://gitlab.kitware.com/icet/icet/repository/'
+ 'archive.tar.gz?ref=IceT-1.2.3')
+ self.check(
+ 'foo', '42.1337',
+ 'http://example.com/org/foo/repository/'
+ 'archive.zip?ref=42.1337bar')
+
def test_version_github(self):
self.check(
'yajl', '1.0.5',
@@ -217,7 +230,7 @@ class UrlParseTest(unittest.TestCase):
def test_imagemagick_style(self):
self.check(
- 'ImageMagick', '6.7.5-7',
+ 'imagemagick', '6.7.5-7',
'http://downloads.sf.net/project/machomebrew/mirror/ImageMagick-6.7.5-7.tar.bz2')
@@ -248,7 +261,7 @@ class UrlParseTest(unittest.TestCase):
def test_xaw3d_version(self):
self.check(
- 'Xaw3d', '1.5E',
+ 'xaw3d', '1.5E',
'ftp://ftp.visi.com/users/hawkeyd/X/Xaw3d-1.5E.tar.gz')
def test_fann_version(self):
@@ -286,7 +299,7 @@ class UrlParseTest(unittest.TestCase):
'mvapich2', '1.9',
'http://mvapich.cse.ohio-state.edu/download/mvapich2/mv2/mvapich2-1.9.tgz')
- def test_mvapich2_19_version(self):
+ def test_mvapich2_20_version(self):
self.check(
'mvapich2', '2.0',
'http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.0.tar.gz')
@@ -325,5 +338,29 @@ class UrlParseTest(unittest.TestCase):
def test_github_raw_url(self):
self.check(
- 'PowerParser', '2.0.7',
+ 'powerparser', '2.0.7',
'https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true')
+
+ def test_r_xml_version(self):
+ self.check(
+ 'xml', '3.98-1.4',
+ 'https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz')
+
+ def test_nco_version(self):
+ self.check(
+ 'nco', '4.6.2-beta03',
+ 'https://github.com/nco/nco/archive/4.6.2-beta03.tar.gz')
+
+ self.check(
+ 'nco', '4.6.3-alpha04',
+ 'https://github.com/nco/nco/archive/4.6.3-alpha04.tar.gz')
+
+ def test_yorick_version(self):
+ self.check(
+ 'yorick', '2_2_04',
+ 'https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz')
+
+ def test_luaposix_version(self):
+ self.check(
+ 'luaposix', '33.4.0',
+ 'https://github.com/luaposix/luaposix/archive/release-v33.4.0.tar.gz')
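
The changed expectations above ('foo_bar' -> 'foo-bar', 'ImageMagick' -> 'imagemagick', and so on) follow from the name normalization added to parse_name_offset() later in this diff; a standalone sketch of that rule:

import re


def normalize_package_name(name):
    # Package names should be lowercase and separated by dashes.
    name = name.lower()
    return re.sub('[_.]', '-', name)


assert normalize_package_name('foo_bar') == 'foo-bar'
assert normalize_package_name('foo.bar.la') == 'foo-bar-la'
assert normalize_package_name('ImageMagick') == 'imagemagick'
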
diff --git a/lib/spack/spack/test/url_substitution.py b/lib/spack/spack/test/url_substitution.py
index 9cc04834b6..ea6374e3d2 100644
--- a/lib/spack/spack/test/url_substitution.py
+++ b/lib/spack/spack/test/url_substitution.py
@@ -26,37 +26,31 @@
This test does sanity checks on substituting new versions into URLs
"""
import unittest
-
import spack.url as url
+base = "https://comp.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz"
+stem = "https://comp.llnl.gov/linear_solvers/download/hypre-"
+
+
class PackageSanityTest(unittest.TestCase):
- def test_hypre_url_substitution(self):
- base = "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz"
+ def test_hypre_url_substitution(self):
self.assertEqual(url.substitute_version(base, '2.9.0b'), base)
self.assertEqual(
- url.substitute_version(base, '2.8.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.8.0b.tar.gz")
+ url.substitute_version(base, '2.8.0b'), stem + "2.8.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '2.7.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.7.0b.tar.gz")
+ url.substitute_version(base, '2.7.0b'), stem + "2.7.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '2.6.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.6.0b.tar.gz")
+ url.substitute_version(base, '2.6.0b'), stem + "2.6.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '1.14.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.14.0b.tar.gz")
+ url.substitute_version(base, '1.14.0b'), stem + "1.14.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '1.13.0b'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.13.0b.tar.gz")
+ url.substitute_version(base, '1.13.0b'), stem + "1.13.0b.tar.gz")
self.assertEqual(
- url.substitute_version(base, '2.0.0'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-2.0.0.tar.gz")
+ url.substitute_version(base, '2.0.0'), stem + "2.0.0.tar.gz")
self.assertEqual(
- url.substitute_version(base, '1.6.0'),
- "https://computation-rnd.llnl.gov/linear_solvers/download/hypre-1.6.0.tar.gz")
-
+ url.substitute_version(base, '1.6.0'), stem + "1.6.0.tar.gz")
def test_otf2_url_substitution(self):
base = "http://www.vi-hps.org/upload/packages/otf2/otf2-1.4.tar.gz"
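
A short usage sketch matching the assertions above; it assumes lib/spack is on sys.path so that spack.url imports.

import spack.url as url

base = "https://comp.llnl.gov/linear_solvers/download/hypre-2.9.0b.tar.gz"
assert url.substitute_version(base, '2.9.0b') == base
assert url.substitute_version(base, '2.8.0b') == (
    "https://comp.llnl.gov/linear_solvers/download/hypre-2.8.0b.tar.gz")
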
diff --git a/lib/spack/spack/test/versions.py b/lib/spack/spack/test/versions.py
index a026403e2e..c1d427783c 100644
--- a/lib/spack/spack/test/versions.py
+++ b/lib/spack/spack/test/versions.py
@@ -43,7 +43,6 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a > b)
self.assertFalse(a >= b)
-
def assert_ver_gt(self, a, b):
a, b = ver(a), ver(b)
self.assertTrue(a > b)
@@ -53,7 +52,6 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a < b)
self.assertFalse(a <= b)
-
def assert_ver_eq(self, a, b):
a, b = ver(a), ver(b)
self.assertFalse(a > b)
@@ -63,55 +61,46 @@ class VersionsTest(unittest.TestCase):
self.assertFalse(a < b)
self.assertTrue(a <= b)
-
def assert_in(self, needle, haystack):
self.assertTrue(ver(needle) in ver(haystack))
-
def assert_not_in(self, needle, haystack):
self.assertFalse(ver(needle) in ver(haystack))
-
def assert_canonical(self, canonical_list, version_list):
self.assertEqual(ver(canonical_list), ver(version_list))
-
def assert_overlaps(self, v1, v2):
self.assertTrue(ver(v1).overlaps(ver(v2)))
-
def assert_no_overlap(self, v1, v2):
self.assertFalse(ver(v1).overlaps(ver(v2)))
-
def assert_satisfies(self, v1, v2):
self.assertTrue(ver(v1).satisfies(ver(v2)))
-
def assert_does_not_satisfy(self, v1, v2):
self.assertFalse(ver(v1).satisfies(ver(v2)))
-
def check_intersection(self, expected, a, b):
self.assertEqual(ver(expected), ver(a).intersection(ver(b)))
-
def check_union(self, expected, a, b):
self.assertEqual(ver(expected), ver(a).union(ver(b)))
-
def test_two_segments(self):
self.assert_ver_eq('1.0', '1.0')
self.assert_ver_lt('1.0', '2.0')
self.assert_ver_gt('2.0', '1.0')
-
+ self.assert_ver_eq('develop', 'develop')
+ self.assert_ver_lt('1.0', 'develop')
+ self.assert_ver_gt('develop', '1.0')
def test_three_segments(self):
self.assert_ver_eq('2.0.1', '2.0.1')
self.assert_ver_lt('2.0', '2.0.1')
self.assert_ver_gt('2.0.1', '2.0')
-
def test_alpha(self):
# TODO: not sure whether I like this. 2.0.1a is *usually*
# TODO: less than 2.0.1, but special-casing it makes version
@@ -120,7 +109,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_gt('2.0.1a', '2.0.1')
self.assert_ver_lt('2.0.1', '2.0.1a')
-
def test_patch(self):
self.assert_ver_eq('5.5p1', '5.5p1')
self.assert_ver_lt('5.5p1', '5.5p2')
@@ -129,7 +117,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('5.5p1', '5.5p10')
self.assert_ver_gt('5.5p10', '5.5p1')
-
def test_num_alpha_with_no_separator(self):
self.assert_ver_lt('10xyz', '10.1xyz')
self.assert_ver_gt('10.1xyz', '10xyz')
@@ -137,7 +124,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('xyz10', 'xyz10.1')
self.assert_ver_gt('xyz10.1', 'xyz10')
-
def test_alpha_with_dots(self):
self.assert_ver_eq('xyz.4', 'xyz.4')
self.assert_ver_lt('xyz.4', '8')
@@ -145,30 +131,25 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('xyz.4', '2')
self.assert_ver_gt('2', 'xyz.4')
-
def test_nums_and_patch(self):
self.assert_ver_lt('5.5p2', '5.6p1')
self.assert_ver_gt('5.6p1', '5.5p2')
self.assert_ver_lt('5.6p1', '6.5p1')
self.assert_ver_gt('6.5p1', '5.6p1')
-
def test_rc_versions(self):
self.assert_ver_gt('6.0.rc1', '6.0')
self.assert_ver_lt('6.0', '6.0.rc1')
-
def test_alpha_beta(self):
self.assert_ver_gt('10b2', '10a1')
self.assert_ver_lt('10a2', '10b2')
-
def test_double_alpha(self):
self.assert_ver_eq('1.0aa', '1.0aa')
self.assert_ver_lt('1.0a', '1.0aa')
self.assert_ver_gt('1.0aa', '1.0a')
-
def test_padded_numbers(self):
self.assert_ver_eq('10.0001', '10.0001')
self.assert_ver_eq('10.0001', '10.1')
@@ -176,24 +157,20 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('10.0001', '10.0039')
self.assert_ver_gt('10.0039', '10.0001')
-
def test_close_numbers(self):
self.assert_ver_lt('4.999.9', '5.0')
self.assert_ver_gt('5.0', '4.999.9')
-
def test_date_stamps(self):
self.assert_ver_eq('20101121', '20101121')
self.assert_ver_lt('20101121', '20101122')
self.assert_ver_gt('20101122', '20101121')
-
def test_underscores(self):
self.assert_ver_eq('2_0', '2_0')
self.assert_ver_eq('2.0', '2_0')
self.assert_ver_eq('2_0', '2.0')
-
def test_rpm_oddities(self):
self.assert_ver_eq('1b.fc17', '1b.fc17')
self.assert_ver_lt('1b.fc17', '1.fc17')
@@ -202,7 +179,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_gt('1g.fc17', '1.fc17')
self.assert_ver_lt('1.fc17', '1g.fc17')
-
# Stuff below here is not taken from RPM's tests and is
# unique to spack
def test_version_ranges(self):
@@ -214,7 +190,6 @@ class VersionsTest(unittest.TestCase):
self.assert_ver_lt('1.2:1.4', '1.5:1.6')
self.assert_ver_gt('1.5:1.6', '1.2:1.4')
-
def test_contains(self):
self.assert_in('1.3', '1.2:1.4')
self.assert_in('1.2.5', '1.2:1.4')
@@ -233,7 +208,6 @@ class VersionsTest(unittest.TestCase):
self.assert_in('1.4.1', '1.2.7:1.4')
self.assert_not_in('1.4.1', '1.2.7:1.4.0')
-
def test_in_list(self):
self.assert_in('1.2', ['1.5', '1.2', '1.3'])
self.assert_in('1.2.5', ['1.5', '1.2:1.3'])
@@ -245,7 +219,6 @@ class VersionsTest(unittest.TestCase):
self.assert_not_in('1.2.5:1.5', ['1.5', '1.2:1.3'])
self.assert_not_in('1.1:1.2.5', ['1.5', '1.2:1.3'])
-
def test_ranges_overlap(self):
self.assert_overlaps('1.2', '1.2')
self.assert_overlaps('1.2.1', '1.2.1')
@@ -262,7 +235,6 @@ class VersionsTest(unittest.TestCase):
self.assert_overlaps(':', '1.6:1.9')
self.assert_overlaps('1.6:1.9', ':')
-
def test_overlap_with_containment(self):
self.assert_in('1.6.5', '1.6')
self.assert_in('1.6.5', ':1.6')
@@ -273,7 +245,6 @@ class VersionsTest(unittest.TestCase):
self.assert_not_in(':1.6', '1.6.5')
self.assert_in('1.6.5', ':1.6')
-
def test_lists_overlap(self):
self.assert_overlaps('1.2b:1.7,5', '1.6:1.9,1')
self.assert_overlaps('1,2,3,4,5', '3,4,5,6,7')
@@ -287,7 +258,6 @@ class VersionsTest(unittest.TestCase):
self.assert_no_overlap('1,2,3,4,5', '6,7')
self.assert_no_overlap('1,2,3,4,5', '6:7')
-
def test_canonicalize_list(self):
self.assert_canonical(['1.2', '1.3', '1.4'],
['1.2', '1.3', '1.3', '1.4'])
@@ -316,7 +286,6 @@ class VersionsTest(unittest.TestCase):
self.assert_canonical([':'],
[':,1.3, 1.3.1,1.3.9,1.4 : 1.5 , 1.3 : 1.4'])
-
def test_intersection(self):
self.check_intersection('2.5',
'1.0:2.5', '2.5:3.0')
@@ -325,12 +294,11 @@ class VersionsTest(unittest.TestCase):
self.check_intersection('0:1', ':', '0:1')
self.check_intersection(['1.0', '2.5:2.7'],
- ['1.0:2.7'], ['2.5:3.0','1.0'])
+ ['1.0:2.7'], ['2.5:3.0', '1.0'])
self.check_intersection(['2.5:2.7'],
- ['1.1:2.7'], ['2.5:3.0','1.0'])
+ ['1.1:2.7'], ['2.5:3.0', '1.0'])
self.check_intersection(['0:1'], [':'], ['0:1'])
-
def test_intersect_with_containment(self):
self.check_intersection('1.6.5', '1.6.5', ':1.6')
self.check_intersection('1.6.5', ':1.6', '1.6.5')
@@ -338,7 +306,6 @@ class VersionsTest(unittest.TestCase):
self.check_intersection('1.6:1.6.5', ':1.6.5', '1.6')
self.check_intersection('1.6:1.6.5', '1.6', ':1.6.5')
-
def test_union_with_containment(self):
self.check_union(':1.6', '1.6.5', ':1.6')
self.check_union(':1.6', ':1.6', '1.6.5')
@@ -346,8 +313,6 @@ class VersionsTest(unittest.TestCase):
self.check_union(':1.6', ':1.6.5', '1.6')
self.check_union(':1.6', '1.6', ':1.6.5')
-
- def test_union_with_containment(self):
self.check_union(':', '1.0:', ':2.0')
self.check_union('1:4', '1:3', '2:4')
@@ -356,7 +321,6 @@ class VersionsTest(unittest.TestCase):
# Tests successor/predecessor case.
self.check_union('1:4', '1:2', '3:4')
-
def test_basic_version_satisfaction(self):
self.assert_satisfies('4.7.3', '4.7.3')
@@ -372,7 +336,6 @@ class VersionsTest(unittest.TestCase):
self.assert_does_not_satisfy('4.8', '4.9')
self.assert_does_not_satisfy('4', '4.9')
-
def test_basic_version_satisfaction_in_lists(self):
self.assert_satisfies(['4.7.3'], ['4.7.3'])
@@ -388,7 +351,6 @@ class VersionsTest(unittest.TestCase):
self.assert_does_not_satisfy(['4.8'], ['4.9'])
self.assert_does_not_satisfy(['4'], ['4.9'])
-
def test_version_range_satisfaction(self):
self.assert_satisfies('4.7b6', '4.3:4.7')
self.assert_satisfies('4.3.0', '4.3:4.7')
@@ -400,7 +362,6 @@ class VersionsTest(unittest.TestCase):
self.assert_satisfies('4.7b6', '4.3:4.7')
self.assert_does_not_satisfy('4.8.0', '4.3:4.7')
-
def test_version_range_satisfaction_in_lists(self):
self.assert_satisfies(['4.7b6'], ['4.3:4.7'])
self.assert_satisfies(['4.3.0'], ['4.3:4.7'])
@@ -423,3 +384,51 @@ class VersionsTest(unittest.TestCase):
self.assert_satisfies('4.8.0', '4.2, 4.3:4.8')
self.assert_satisfies('4.8.2', '4.2, 4.3:4.8')
+
+ def test_formatted_strings(self):
+ versions = '1.2.3', '1_2_3', '1-2-3'
+ for item in versions:
+ v = Version(item)
+ self.assertEqual(v.dotted, '1.2.3')
+ self.assertEqual(v.dashed, '1-2-3')
+ self.assertEqual(v.underscored, '1_2_3')
+ self.assertEqual(v.joined, '123')
+
+ def test_repr_and_str(self):
+
+ def check_repr_and_str(vrs):
+ a = Version(vrs)
+ self.assertEqual(repr(a), 'Version(\'' + vrs + '\')')
+ b = eval(repr(a))
+ self.assertEqual(a, b)
+ self.assertEqual(str(a), vrs)
+ self.assertEqual(str(a), str(b))
+
+ check_repr_and_str('1.2.3')
+ check_repr_and_str('R2016a')
+ check_repr_and_str('R2016a.2-3_4')
+
+ def test_get_item(self):
+ a = Version('0.1_2-3')
+ self.assertTrue(isinstance(a[1], int))
+ # Test slicing
+ b = a[0:2]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('0.1'))
+ self.assertEqual(repr(b), 'Version(\'0.1\')')
+ self.assertEqual(str(b), '0.1')
+ b = a[0:3]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('0.1_2'))
+ self.assertEqual(repr(b), 'Version(\'0.1_2\')')
+ self.assertEqual(str(b), '0.1_2')
+ b = a[1:]
+ self.assertTrue(isinstance(b, Version))
+ self.assertEqual(b, Version('1_2-3'))
+ self.assertEqual(repr(b), 'Version(\'1_2-3\')')
+ self.assertEqual(str(b), '1_2-3')
+ # Raise TypeError on tuples
+ self.assertRaises(TypeError, b.__getitem__, 1, 2)
+
+if __name__ == '__main__':
+ unittest.main()
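
A usage sketch of the Version helpers exercised by the new tests above (assumes lib/spack on sys.path so spack.version imports):

from spack.version import Version

v = Version('1_2_3')
assert v.dotted == '1.2.3'
assert v.dashed == '1-2-3'
assert v.underscored == '1_2_3'
assert v.joined == '123'

a = Version('0.1_2-3')
assert str(a[0:2]) == '0.1'   # slices are Version objects
assert str(a[1:]) == '1_2-3'  # original separators are preserved
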
diff --git a/lib/spack/spack/url.py b/lib/spack/spack/url.py
index f678a2dca9..a1eec6067e 100644
--- a/lib/spack/spack/url.py
+++ b/lib/spack/spack/url.py
@@ -56,12 +56,12 @@ import spack.error
import spack.util.compression as comp
from spack.version import Version
+
#
# Note: We call the input to most of these functions a "path" but the functions
# work on paths and URLs. There's not a good word for both of these, but
# "path" seemed like the most generic term.
#
-
def find_list_url(url):
"""Finds a good list URL for the supplied URL. This depends on
the site. By default, just assumes that a good list URL is the
@@ -71,8 +71,8 @@ def find_list_url(url):
url_types = [
# e.g. https://github.com/llnl/callpath/archive/v1.0.1.tar.gz
- (r'^(https://github.com/[^/]+/[^/]+)/archive/', lambda m: m.group(1) + '/releases')
- ]
+ (r'^(https://github.com/[^/]+/[^/]+)/archive/',
+ lambda m: m.group(1) + '/releases')]
for pattern, fun in url_types:
match = re.search(pattern, url)
@@ -89,8 +89,10 @@ def strip_query_and_fragment(path):
query, frag = components[3:5]
suffix = ''
- if query: suffix += '?' + query
- if frag: suffix += '#' + frag
+ if query:
+ suffix += '?' + query
+ if frag:
+ suffix += '#' + frag
return (urlunsplit(stripped), suffix)
@@ -104,19 +106,22 @@ def split_url_extension(path):
1. https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7.tgz?raw=true
2. http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin.tar.gz
+ 3. https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
In (1), the query string needs to be stripped to get at the
- extension, but in (2), the filename is IN a single final query
+ extension, but in (2) & (3), the filename is IN a single final query
argument.
This strips the URL into three pieces: prefix, ext, and suffix.
The suffix contains anything that was stripped off the URL to
get at the file extension. In (1), it will be '?raw=true', but
- in (2), it will be empty. e.g.:
+ in (2), it will be empty. In (3) the suffix is a query parameter that
+ follows the file extension, e.g.:
1. ('https://github.com/losalamos/CLAMR/blob/packages/PowerParser_v2.0.7', '.tgz', '?raw=true')
2. ('http://www.apache.org/dyn/closer.cgi?path=/cassandra/1.2.0/apache-cassandra-1.2.0-rc2-bin',
'.tar.gz', None)
+ 3. ('https://gitlab.kitware.com/vtk/vtk/repository/archive', '.tar.bz2', '?ref=v7.0.0')
"""
prefix, ext, suffix = path, '', ''
@@ -140,7 +145,7 @@ def split_url_extension(path):
return prefix, ext, suffix
-def downloaded_file_extension(path):
+def determine_url_file_extension(path):
"""This returns the type of archive a URL refers to. This is
sometimes confusing because of URLs like:
@@ -152,16 +157,16 @@ def downloaded_file_extension(path):
"""
match = re.search(r'github.com/.+/(zip|tar)ball/', path)
if match:
- if match.group(1) == 'zip': return 'zip'
- elif match.group(1) == 'tar': return 'tar.gz'
+ if match.group(1) == 'zip':
+ return 'zip'
+ elif match.group(1) == 'tar':
+ return 'tar.gz'
prefix, ext, suffix = split_url_extension(path)
- if not ext:
- raise UrlParseError("Cannot deduce archive type in %s" % path, path)
return ext
-def parse_version_offset(path):
+def parse_version_offset(path, debug=False):
"""Try to extract a version string from a filename or URL. This is taken
largely from Homebrew's Version class."""
original_path = path
@@ -186,28 +191,42 @@ def parse_version_offset(path):
# e.g. https://github.com/petdance/ack/tarball/1.93_02
(r'github.com/.+/(?:zip|tar)ball/v?((\d+\.)+\d+_(\d+))$', path),
+ # Yorick is very special.
+ # e.g. https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz
+ (r'github.com/[^/]+/yorick/archive/y_(\d+(?:_\d+)*)$', path),
+
# e.g. https://github.com/hpc/lwgrp/archive/v1.0.1.tar.gz
- (r'github.com/[^/]+/[^/]+/archive/v?(\d+(?:\.\d+)*)$', path),
+ (r'github.com/[^/]+/[^/]+/archive/(?:release-)?v?(\w+(?:[.-]\w+)*)$', path), # noqa
# e.g. https://github.com/erlang/otp/tarball/OTP_R15B01 (erlang style)
(r'[-_](R\d+[AB]\d*(-\d+)?)', path),
# e.g., https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz
- # e.g., https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
+ # e.g.,
+ # https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz
(r'github.com/[^/]+/[^/]+/releases/download/v?([^/]+)/.*$', path),
+ # GitLab syntax:
+ # {baseUrl}{/organization}{/projectName}/repository/archive.{fileEnding}?ref={gitTag}
+ # as with github releases, we hope a version can be found in the
+ # git tag
+ # Search dotted versions:
+ # e.g., https://gitlab.kitware.com/vtk/vtk/repository/archive.tar.bz2?ref=v7.0.0
+ # e.g., https://example.com/org/repo/repository/archive.tar.bz2?ref=SomePrefix-2.1.1
+ (r'\?ref=(?:.*-|v)*((\d+\.)+\d+).*$', suffix),
+
# e.g. boost_1_39_0
(r'((\d+_)+\d+)$', stem),
# e.g. foobar-4.5.1-1
# e.g. ruby-1.9.1-p243
- (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem),
+ (r'-((\d+\.)*\d\.\d+-(p|rc|RC)?\d+)(?:[-._](?:bin|dist|stable|src|sources))?$', stem), # noqa
# e.g. lame-398-1
(r'-((\d)+-\d)', stem),
- # e.g. foobar_1.2-3
- (r'_((\d+\.)+\d+(-\d+)?[a-z]?)', stem),
+ # e.g. foobar_1.2-3 or 3.98-1.4
+ (r'_((\d+\.)+\d+(-(\d+(\.\d+)?))?[a-z]?)', stem),
# e.g. foobar-4.5.1
(r'-((\d+\.)*\d+)$', stem),
@@ -243,6 +262,10 @@ def parse_version_offset(path):
regex, match_string = vtype
match = re.search(regex, match_string)
if match and match.group(1) is not None:
+ if debug:
+ tty.msg("Parsing URL: %s" % path,
+ " Matched regex %d: r'%s'" % (i, regex))
+
version = match.group(1)
start = match.start(1)
@@ -255,17 +278,17 @@ def parse_version_offset(path):
raise UndetectableVersionError(original_path)
-def parse_version(path):
+def parse_version(path, debug=False):
"""Given a URL or archive name, extract a version from it and return
a version object.
"""
- ver, start, l = parse_version_offset(path)
+ ver, start, l = parse_version_offset(path, debug=debug)
return Version(ver)
-def parse_name_offset(path, v=None):
+def parse_name_offset(path, v=None, debug=False):
if v is None:
- v = parse_version(path)
+ v = parse_version(path, debug=debug)
path, ext, suffix = split_url_extension(path)
@@ -275,15 +298,18 @@ def parse_name_offset(path, v=None):
name_types = [
(r'/sourceforge/([^/]+)/', path),
- (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' % (v, v), path),
+ (r'github.com/[^/]+/[^/]+/releases/download/%s/(.*)-%s$' %
+ (v, v), path),
(r'/([^/]+)/(tarball|zipball)/', path),
(r'/([^/]+)[_.-](bin|dist|stable|src|sources)[_.-]%s' % v, path),
(r'github.com/[^/]+/([^/]+)/archive', path),
+ (r'[^/]+/([^/]+)/repository/archive', path), # gitlab
(r'([^/]+)[_.-]v?%s' % v, stem), # prefer the stem
(r'([^/]+)%s' % v, stem),
- (r'/([^/]+)[_.-]v?%s' % v, path), # accept the path if name is not in stem.
+ # accept the path if name is not in stem.
+ (r'/([^/]+)[_.-]v?%s' % v, path),
(r'/([^/]+)%s' % v, path),
(r'^([^/]+)[_.-]v?%s' % v, path),
@@ -300,6 +326,10 @@ def parse_name_offset(path, v=None):
if match_string is stem:
start += offset
+ # package names should be lowercase and separated by dashes.
+ name = name.lower()
+ name = re.sub('[_.]', '-', name)
+
return name, start, len(name)
raise UndetectableNameError(path)
@@ -318,7 +348,7 @@ def parse_name_and_version(path):
def insensitize(string):
"""Change upper and lowercase letters to be case insensitive in
- the provided string. e.g., 'a' because '[Aa]', 'B' becomes
+ the provided string. e.g., 'a' becomes '[Aa]', 'B' becomes
'[bB]', etc. Use for building regexes."""
def to_ins(match):
char = match.group(1)
@@ -326,7 +356,7 @@ def insensitize(string):
return re.sub(r'([a-zA-Z])', to_ins, string)
-def cumsum(elts, init=0, fn=lambda x:x):
+def cumsum(elts, init=0, fn=lambda x: x):
"""Return cumulative sum of result of fn on each element in elts."""
sums = []
s = init
@@ -337,21 +367,20 @@ def cumsum(elts, init=0, fn=lambda x:x):
def substitution_offsets(path):
- """This returns offsets for substituting versions and names in the provided path.
- It is a helper for substitute_version().
+ """This returns offsets for substituting versions and names in the
+ provided path. It is a helper for substitute_version().
"""
# Get name and version offsets
try:
ver, vs, vl = parse_version_offset(path)
name, ns, nl = parse_name_offset(path, ver)
- except UndetectableNameError, e:
+ except UndetectableNameError:
return (None, -1, -1, (), ver, vs, vl, (vs,))
- except UndetectableVersionError, e:
+ except UndetectableVersionError:
return (None, -1, -1, (), None, -1, -1, ())
# protect extensions like bz2 from getting inadvertently
# considered versions.
- ext = comp.extension(path)
path = comp.strip_extension(path)
# Construct a case-insensitive regular expression for the package name.
@@ -449,7 +478,7 @@ def color_url(path, **kwargs):
Cyan: The version found by parse_version_offset().
Red: The name found by parse_name_offset().
- Green: Instances of version string substituted by substitute_version().
+ Green: Instances of version string from substitute_version().
Magenta: Instances of the name (protected from substitution).
Optional args:
@@ -469,31 +498,46 @@ def color_url(path, **kwargs):
nerr = verr = 0
out = StringIO()
for i in range(len(path)):
- if i == vs: out.write('@c'); verr += 1
- elif i == ns: out.write('@r'); nerr += 1
+ if i == vs:
+ out.write('@c')
+ verr += 1
+ elif i == ns:
+ out.write('@r')
+ nerr += 1
elif subs:
- if i in voffs: out.write('@g')
- elif i in noffs: out.write('@m')
+ if i in voffs:
+ out.write('@g')
+ elif i in noffs:
+ out.write('@m')
out.write(path[i])
- if i == vs + vl - 1: out.write('@.'); verr += 1
- elif i == ns + nl - 1: out.write('@.'); nerr += 1
+ if i == vs + vl - 1:
+ out.write('@.')
+ verr += 1
+ elif i == ns + nl - 1:
+ out.write('@.')
+ nerr += 1
elif subs:
if i in vends or i in nends:
out.write('@.')
if errors:
- if nerr == 0: out.write(" @r{[no name]}")
- if verr == 0: out.write(" @r{[no version]}")
- if nerr == 1: out.write(" @r{[incomplete name]}")
- if verr == 1: out.write(" @r{[incomplete version]}")
+ if nerr == 0:
+ out.write(" @r{[no name]}")
+ if verr == 0:
+ out.write(" @r{[no version]}")
+ if nerr == 1:
+ out.write(" @r{[incomplete name]}")
+ if verr == 1:
+ out.write(" @r{[incomplete version]}")
return colorize(out.getvalue())
class UrlParseError(spack.error.SpackError):
"""Raised when the URL module can't parse something correctly."""
+
def __init__(self, msg, path):
super(UrlParseError, self).__init__(msg)
self.path = path
@@ -501,6 +545,7 @@ class UrlParseError(spack.error.SpackError):
class UndetectableVersionError(UrlParseError):
"""Raised when we can't parse a version from a string."""
+
def __init__(self, path):
super(UndetectableVersionError, self).__init__(
"Couldn't detect version in: " + path, path)
@@ -508,6 +553,7 @@ class UndetectableVersionError(UrlParseError):
class UndetectableNameError(UrlParseError):
"""Raised when we can't parse a package name from a string."""
+
def __init__(self, path):
super(UndetectableNameError, self).__init__(
"Couldn't parse package name in: " + path, path)
diff --git a/lib/spack/spack/util/compression.py b/lib/spack/spack/util/compression.py
index dc1188eb0f..caec70064d 100644
--- a/lib/spack/spack/util/compression.py
+++ b/lib/spack/spack/util/compression.py
@@ -32,17 +32,24 @@ PRE_EXTS = ["tar"]
EXTS = ["gz", "bz2", "xz", "Z", "zip", "tgz"]
# Add PRE_EXTS and EXTS last so that .tar.gz is matched *before* .tar or .gz
-ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(PRE_EXTS, EXTS)] + PRE_EXTS + EXTS
+ALLOWED_ARCHIVE_TYPES = [".".join(l) for l in product(
+ PRE_EXTS, EXTS)] + PRE_EXTS + EXTS
+
def allowed_archive(path):
return any(path.endswith(t) for t in ALLOWED_ARCHIVE_TYPES)
-def decompressor_for(path):
+def decompressor_for(path, extension=None):
"""Get the appropriate decompressor for a path."""
- if path.endswith(".zip"):
+ if ((extension and re.match(r'\.?zip$', extension)) or
+ path.endswith('.zip')):
unzip = which('unzip', required=True)
+ unzip.add_default_arg('-q')
return unzip
+ if extension and re.match(r'gz', extension):
+ gunzip = which('gunzip', required=True)
+ return gunzip
tar = which('tar', required=True)
tar.add_default_arg('-xf')
return tar
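
A self-contained mirror of the selection logic in decompressor_for(), returning command lines instead of Executable objects so it runs anywhere:

import re


def decompressor_command(path, extension=None):
    # unzip for zip archives, gunzip when the fetcher reports a bare gz
    # extension, tar -xf for everything else.
    if ((extension and re.match(r'\.?zip$', extension)) or
            path.endswith('.zip')):
        return ['unzip', '-q']
    if extension and re.match(r'gz', extension):
        return ['gunzip']
    return ['tar', '-xf']


assert decompressor_command('foo-1.0.zip') == ['unzip', '-q']
assert decompressor_command('foo-1.0', extension='gz') == ['gunzip']
assert decompressor_command('foo-1.0.tar.gz') == ['tar', '-xf']
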
diff --git a/lib/spack/spack/util/crypto.py b/lib/spack/spack/util/crypto.py
index 1ae9793518..d074716022 100644
--- a/lib/spack/spack/util/crypto.py
+++ b/lib/spack/spack/util/crypto.py
@@ -31,7 +31,7 @@ _acceptable_hashes = [
hashlib.sha224,
hashlib.sha256,
hashlib.sha384,
- hashlib.sha512 ]
+ hashlib.sha512]
"""Index for looking up hasher for a digest."""
_size_to_hash = dict((h().digest_size, h) for h in _acceptable_hashes)
@@ -52,7 +52,6 @@ def checksum(hashlib_algo, filename, **kwargs):
return hasher.hexdigest()
-
class Checker(object):
"""A checker checks files against one particular hex digest.
It will automatically determine what hashing algorithm
@@ -74,25 +73,25 @@ class Checker(object):
adjusting the block_size optional arg. By default it's
a 1MB (2**20 bytes) buffer.
"""
+
def __init__(self, hexdigest, **kwargs):
self.block_size = kwargs.get('block_size', 2**20)
self.hexdigest = hexdigest
self.sum = None
bytes = len(hexdigest) / 2
- if not bytes in _size_to_hash:
+ if bytes not in _size_to_hash:
raise ValueError(
- 'Spack knows no hash algorithm for this digest: %s' % hexdigest)
+ 'Spack knows no hash algorithm for this digest: %s'
+ % hexdigest)
self.hash_fun = _size_to_hash[bytes]
-
@property
def hash_name(self):
"""Get the name of the hash function this Checker is using."""
return self.hash_fun().name
-
def check(self, filename):
"""Read the file with the specified name and check its checksum
against self.hexdigest. Return True if they match, False
@@ -101,3 +100,24 @@ class Checker(object):
self.sum = checksum(
self.hash_fun, filename, block_size=self.block_size)
return self.sum == self.hexdigest
+
+
+def prefix_bits(byte_array, bits):
+ """Return the first <bits> bits of a byte array as an integer."""
+ result = 0
+ n = 0
+ for i, b in enumerate(byte_array):
+ n += 8
+ result = (result << 8) | ord(b)
+ if n >= bits:
+ break
+
+ result >>= (n - bits)
+ return result
+
+
+def bit_length(num):
+ """Number of bits required to represent an integer in binary."""
+ s = bin(num)
+ s = s.lstrip('-0b')
+ return len(s)
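
The two new helpers, restated (lightly condensed) from the hunk above and exercised on a Python 2 byte string; the ord() call assumes str-style bytes, as in the rest of this codebase.

def prefix_bits(byte_array, bits):
    """Return the first <bits> bits of a byte array as an integer."""
    result = 0
    n = 0
    for b in byte_array:
        n += 8
        result = (result << 8) | ord(b)
        if n >= bits:
            break
    result >>= (n - bits)
    return result


def bit_length(num):
    """Number of bits required to represent an integer in binary."""
    return len(bin(num).lstrip('-0b'))


assert prefix_bits('\xff\x00', 4) == 0xf      # top nibble of 0xff00
assert prefix_bits('\xff\x00', 12) == 0xff0   # top 12 bits of 0xff00
assert bit_length(8) == 4
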
diff --git a/lib/spack/spack/util/debug.py b/lib/spack/spack/util/debug.py
index e8a0595416..cf485a611d 100644
--- a/lib/spack/spack/util/debug.py
+++ b/lib/spack/spack/util/debug.py
@@ -33,10 +33,11 @@ import code
import traceback
import signal
+
def debug_handler(sig, frame):
"""Interrupt running process, and provide a python prompt for
interactive debugging."""
- d = {'_frame':frame} # Allow access to frame object.
+ d = {'_frame': frame} # Allow access to frame object.
d.update(frame.f_globals) # Unless shadowed by global
d.update(frame.f_locals)
@@ -48,5 +49,5 @@ def debug_handler(sig, frame):
def register_interrupt_handler():
- """Register a handler to print a stack trace and enter an interpreter on Ctrl-C"""
+ """Print traceback and enter an interpreter on Ctrl-C"""
signal.signal(signal.SIGINT, debug_handler)
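
Usage sketch for the interrupt handler (assumes a working Spack checkout so spack.util.debug imports): pressing Ctrl-C while the loop runs drops into the interactive prompt installed by debug_handler() instead of killing the process.

import time

from spack.util.debug import register_interrupt_handler

register_interrupt_handler()
for _ in range(60):
    time.sleep(1)
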
diff --git a/lib/spack/spack/util/environment.py b/lib/spack/spack/util/environment.py
index f23110d2d9..420cce8245 100644
--- a/lib/spack/spack/util/environment.py
+++ b/lib/spack/spack/util/environment.py
@@ -24,6 +24,25 @@
##############################################################################
import os
+system_paths = ['/', '/usr', '/usr/local']
+suffixes = ['lib', 'lib64', 'include']
+system_dirs = [os.path.join(p, s) for s in suffixes for p in system_paths] + \
+ system_paths
+system_bins = [os.path.join(p, 'bin') for p in system_paths]
+
+
+def filter_system_paths(paths):
+ return [p for p in paths if p not in system_dirs]
+
+
+def filter_system_bin_paths(paths):
+ # Turn the iterable into a list. Assume it's a list from here on.
+ _paths = list(paths)
+ bins = [p for p in _paths if p in system_bins]
+ nobins = [p for p in _paths if p not in system_bins]
+ # put bins in front, since PATH is set via prepend_path('PATH', item)
+ return bins + nobins
+
def get_path(name):
path = os.environ.get(name, "").strip()
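
A standalone mirror of the new system-path filters, showing what they drop and how they reorder bin directories:

import os

system_paths = ['/', '/usr', '/usr/local']
suffixes = ['lib', 'lib64', 'include']
system_dirs = [os.path.join(p, s) for s in suffixes
               for p in system_paths] + system_paths
system_bins = [os.path.join(p, 'bin') for p in system_paths]

# filter_system_paths(): drop lib/include dirs the toolchain searches anyway.
paths = ['/usr/lib', '/opt/mylib/lib', '/usr/local/include']
assert [p for p in paths if p not in system_dirs] == ['/opt/mylib/lib']

# filter_system_bin_paths(): keep everything, but group system bin dirs first.
paths = ['/opt/tool/bin', '/usr/bin', '/another/bin']
bins = [p for p in paths if p in system_bins]
nobins = [p for p in paths if p not in system_bins]
assert bins + nobins == ['/usr/bin', '/opt/tool/bin', '/another/bin']
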
diff --git a/lib/spack/spack/util/executable.py b/lib/spack/spack/util/executable.py
index 38b778fa00..63bbbb7c92 100644
--- a/lib/spack/spack/util/executable.py
+++ b/lib/spack/spack/util/executable.py
@@ -26,7 +26,6 @@
import os
import re
import subprocess
-import inspect
import llnl.util.tty as tty
import spack
@@ -40,6 +39,7 @@ class Executable(object):
def __init__(self, name):
self.exe = name.split(' ')
+ self.default_env = {}
self.returncode = None
if not self.exe:
@@ -48,6 +48,9 @@ class Executable(object):
def add_default_arg(self, arg):
self.exe.append(arg)
+ def add_default_env(self, key, value):
+ self.default_env[key] = value
+
@property
def command(self):
return ' '.join(self.exe)
@@ -103,7 +106,13 @@ class Executable(object):
fail_on_error = kwargs.pop("fail_on_error", True)
ignore_errors = kwargs.pop("ignore_errors", ())
+ # environment
env = kwargs.get('env', None)
+ if env is None:
+ env = os.environ.copy()
+ env.update(self.default_env)
+ else:
+ # dict.copy().update() returns None, so merge explicitly
+ env = dict(self.default_env, **env)
# TODO: This is deprecated. Remove in a future version.
return_output = kwargs.pop("return_output", False)
@@ -149,6 +158,7 @@ class Executable(object):
cmd_line = "'%s'" % "' '".join(
map(lambda arg: arg.replace("'", "'\"'\"'"), cmd))
+
tty.debug(cmd_line)
try:
@@ -173,11 +183,11 @@ class Executable(object):
result += err
return result
- except OSError, e:
+ except OSError as e:
raise ProcessError(
"%s: %s" % (self.exe[0], e.strerror), "Command: " + cmd_line)
- except subprocess.CalledProcessError, e:
+ except subprocess.CalledProcessError as e:
if fail_on_error:
raise ProcessError(
str(e), "\nExit status %d when invoking command: %s" %
@@ -185,11 +195,11 @@ class Executable(object):
finally:
if close_ostream:
- output.close()
+ ostream.close()
if close_estream:
- error.close()
+ estream.close()
if close_istream:
- input.close()
+ istream.close()
def __eq__(self, other):
return self.exe == other.exe
@@ -226,73 +236,4 @@ def which(name, **kwargs):
class ProcessError(spack.error.SpackError):
- def __init__(self, msg, long_message=None):
- # These are used for detailed debugging information for
- # package builds. They're built up gradually as the exception
- # propagates.
- self.package_context = _get_package_context()
- self.build_log = None
-
- super(ProcessError, self).__init__(msg, long_message)
-
- @property
- def long_message(self):
- msg = self._long_message
- if msg:
- msg += "\n\n"
-
- if self.build_log:
- msg += "See build log for details:\n"
- msg += " %s" % self.build_log
-
- if self.package_context:
- if msg:
- msg += "\n\n"
- msg += '\n'.join(self.package_context)
-
- return msg
-
-
-def _get_package_context():
- """Return some context for an error message when the build fails.
-
- This should be called within a ProcessError when the exception is
- thrown.
-
- Args:
- process_error -- A ProcessError raised during install()
-
- This function inspects the stack to find where we failed in the
- package file, and it adds detailed context to the long_message
- from there.
-
- """
- lines = []
-
- # Walk up the stack
- for f in inspect.stack():
- frame = f[0]
-
- # Find a frame with 'self' in the local variables.
- if 'self' not in frame.f_locals:
- continue
-
- # Look only at a frame in a subclass of spack.Package
- obj = frame.f_locals['self']
- if type(obj) != spack.Package and isinstance(obj, spack.Package):
- break
- else:
- # Didn't find anything
- return lines
-
- # Build a message showing where in install we failed.
- lines.append("%s:%d, in %s:" % (
- inspect.getfile(frame.f_code), frame.f_lineno, frame.f_code.co_name
- ))
-
- sourcelines, start = inspect.getsourcelines(frame)
- for i, line in enumerate(sourcelines):
- mark = ">> " if start + i == frame.f_lineno else " "
- lines.append(" %s%-5d%s" % (mark, start + i, line.rstrip()))
-
- return lines
+ """ProcessErrors are raised when Executables exit with an error code."""
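
A sketch of how add_default_env() is meant to combine with a caller-supplied env, using the explicit merge noted above; EnvDefaults is a hypothetical mini-class, not the real Executable.

import os


class EnvDefaults(object):
    """Hypothetical mini-version of Executable's default_env handling."""

    def __init__(self):
        self.default_env = {}

    def add_default_env(self, key, value):
        self.default_env[key] = value

    def effective_env(self, env=None):
        if env is None:
            # Defaults are layered on top of the current environment...
            merged = os.environ.copy()
            merged.update(self.default_env)
        else:
            # ...but an explicitly passed env wins over the defaults.
            merged = self.default_env.copy()
            merged.update(env)
        return merged


e = EnvDefaults()
e.add_default_env('SPACK_DEBUG', '1')
assert e.effective_env({'PATH': '/usr/bin'}) == {'PATH': '/usr/bin',
                                                 'SPACK_DEBUG': '1'}
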
diff --git a/lib/spack/spack/util/multiproc.py b/lib/spack/spack/util/multiproc.py
index 038cd90121..6a25c45713 100644
--- a/lib/spack/spack/util/multiproc.py
+++ b/lib/spack/spack/util/multiproc.py
@@ -32,18 +32,21 @@ from itertools import izip
__all__ = ['spawn', 'parmap', 'Barrier']
+
def spawn(f):
- def fun(pipe,x):
+ def fun(pipe, x):
pipe.send(f(x))
pipe.close()
return fun
-def parmap(f,X):
- pipe=[Pipe() for x in X]
- proc=[Process(target=spawn(f),args=(c,x)) for x,(p,c) in izip(X,pipe)]
+
+def parmap(f, X):
+ pipe = [Pipe() for x in X]
+ proc = [Process(target=spawn(f), args=(c, x))
+ for x, (p, c) in izip(X, pipe)]
[p.start() for p in proc]
[p.join() for p in proc]
- return [p.recv() for (p,c) in pipe]
+ return [p.recv() for (p, c) in pipe]
class Barrier:
@@ -53,6 +56,7 @@ class Barrier:
See http://greenteapress.com/semaphores/downey08semaphores.pdf, p. 41.
"""
+
def __init__(self, n, timeout=None):
self.n = n
self.to = timeout
@@ -61,7 +65,6 @@ class Barrier:
self.turnstile1 = Semaphore(0)
self.turnstile2 = Semaphore(1)
-
def wait(self):
if not self.mutex.acquire(timeout=self.to):
raise BarrierTimeoutError()
@@ -90,4 +93,5 @@ class Barrier:
self.turnstile2.release()
-class BarrierTimeoutError: pass
+class BarrierTimeoutError:
+ pass
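
A self-contained sketch of the parmap() pattern above using only the standard library; like the original, it relies on the fork start method, so the worker closure does not need to be picklable.

from multiprocessing import Pipe, Process


def spawn(f):
    def fun(pipe, x):
        pipe.send(f(x))
        pipe.close()
    return fun


def parmap(f, xs):
    # One Pipe/Process pair per element; results come back over the pipes.
    pipes = [Pipe() for _ in xs]
    procs = [Process(target=spawn(f), args=(child, x))
             for x, (parent, child) in zip(xs, pipes)]
    for p in procs:
        p.start()
    for p in procs:
        p.join()
    return [parent.recv() for (parent, child) in pipes]


def square(x):
    return x * x


if __name__ == '__main__':
    assert parmap(square, [1, 2, 3]) == [1, 4, 9]
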
diff --git a/lib/spack/spack/util/naming.py b/lib/spack/spack/util/naming.py
index 2d9762942d..9a5cdee411 100644
--- a/lib/spack/spack/util/naming.py
+++ b/lib/spack/spack/util/naming.py
@@ -31,9 +31,15 @@ from StringIO import StringIO
import spack
-__all__ = ['mod_to_class', 'spack_module_to_python_module', 'valid_module_name',
- 'valid_fully_qualified_module_name', 'validate_fully_qualified_module_name',
- 'validate_module_name', 'possible_spack_module_names', 'NamespaceTrie']
+__all__ = [
+ 'mod_to_class',
+ 'spack_module_to_python_module',
+ 'valid_module_name',
+ 'valid_fully_qualified_module_name',
+ 'validate_fully_qualified_module_name',
+ 'validate_module_name',
+ 'possible_spack_module_names',
+ 'NamespaceTrie']
# Valid module names can contain '-' but can't start with it.
_valid_module_re = r'^\w[\w-]*$'
@@ -67,8 +73,8 @@ def mod_to_class(mod_name):
class_name = string.capwords(class_name, '-')
class_name = class_name.replace('-', '')
- # If a class starts with a number, prefix it with Number_ to make it a valid
- # Python class name.
+ # If a class name starts with a number, prefix it with an underscore to
+ # make it a valid Python class name.
if re.match(r'^[0-9]', class_name):
class_name = "_%s" % class_name
@@ -126,6 +132,7 @@ def validate_fully_qualified_module_name(mod_name):
class InvalidModuleNameError(spack.error.SpackError):
"""Raised when we encounter a bad module name."""
+
def __init__(self, name):
super(InvalidModuleNameError, self).__init__(
"Invalid module name: " + name)
@@ -134,6 +141,7 @@ class InvalidModuleNameError(spack.error.SpackError):
class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
"""Raised when we encounter a bad full package name."""
+
def __init__(self, name):
super(InvalidFullyQualifiedModuleNameError, self).__init__(
"Invalid fully qualified package name: " + name)
@@ -141,17 +149,17 @@ class InvalidFullyQualifiedModuleNameError(spack.error.SpackError):
class NamespaceTrie(object):
+
class Element(object):
+
def __init__(self, value):
self.value = value
-
def __init__(self, separator='.'):
self._subspaces = {}
self._value = None
self._sep = separator
-
def __setitem__(self, namespace, value):
first, sep, rest = namespace.partition(self._sep)
@@ -164,7 +172,6 @@ class NamespaceTrie(object):
self._subspaces[first][rest] = value
-
def _get_helper(self, namespace, full_name):
first, sep, rest = namespace.partition(self._sep)
if not first:
@@ -176,13 +183,12 @@ class NamespaceTrie(object):
else:
return self._subspaces[first]._get_helper(rest, full_name)
-
def __getitem__(self, namespace):
return self._get_helper(namespace, namespace)
-
def is_prefix(self, namespace):
- """True if the namespace has a value, or if it's the prefix of one that does."""
+ """True if the namespace has a value, or if it's the prefix of one that
+ does."""
first, sep, rest = namespace.partition(self._sep)
if not first:
return True
@@ -191,7 +197,6 @@ class NamespaceTrie(object):
else:
return self._subspaces[first].is_prefix(rest)
-
def is_leaf(self, namespace):
"""True if this namespace has no children in the trie."""
first, sep, rest = namespace.partition(self._sep)
@@ -202,7 +207,6 @@ class NamespaceTrie(object):
else:
return self._subspaces[first].is_leaf(rest)
-
def has_value(self, namespace):
"""True if there is a value set for the given namespace."""
first, sep, rest = namespace.partition(self._sep)
@@ -213,20 +217,17 @@ class NamespaceTrie(object):
else:
return self._subspaces[first].has_value(rest)
-
def __contains__(self, namespace):
"""Returns whether a value has been set for the namespace."""
return self.has_value(namespace)
-
def _str_helper(self, stream, level=0):
indent = (level * ' ')
for name in sorted(self._subspaces):
stream.write(indent + name + '\n')
if self._value:
stream.write(indent + ' ' + repr(self._value.value))
- stream.write(self._subspaces[name]._str_helper(stream, level+1))
-
+ stream.write(self._subspaces[name]._str_helper(stream, level + 1))
def __str__(self):
stream = StringIO()
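
A standalone sketch of the module-name-to-class-name rule described by the reworded comment above; it mirrors only the steps visible in this hunk (capwords on dashes, strip dashes, underscore-prefix a leading digit), not all of mod_to_class().

import re
import string


def mod_to_class_sketch(mod_name):
    class_name = string.capwords(mod_name, '-')
    class_name = class_name.replace('-', '')
    # A leading digit is not a valid start for a Python class name.
    if re.match(r'^[0-9]', class_name):
        class_name = "_%s" % class_name
    return class_name


assert mod_to_class_sketch('py-numpy') == 'PyNumpy'
assert mod_to_class_sketch('3to2') == '_3to2'
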
diff --git a/lib/spack/spack/util/path.py b/lib/spack/spack/util/path.py
new file mode 100644
index 0000000000..7235f6b756
--- /dev/null
+++ b/lib/spack/spack/util/path.py
@@ -0,0 +1,72 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Utilities for managing paths in Spack.
+"""
+import os
+import re
+import spack
+import getpass
+import tempfile
+
+__all__ = [
+ 'substitute_config_variables',
+ 'canonicalize_path']
+
+# Substitutions to perform
+replacements = {
+ 'spack': spack.prefix,
+ 'user': getpass.getuser(),
+ 'tempdir': tempfile.gettempdir(),
+}
+
+
+def substitute_config_variables(path):
+ """Substitute placeholders into paths.
+
+ Spack allows paths in configs to have some placeholders, as follows:
+
+ - $spack The Spack instance's prefix
+ - $user The current user's username
+ - $tempdir Default temporary directory returned by tempfile.gettempdir()
+
+ These are substituted case-insensitively into the path, and users can
+ use either ``$var`` or ``${var}`` syntax for the variables.
+
+ """
+ # Look up replacements for re.sub in the replacements dict.
+ def repl(match):
+ m = match.group(0).strip('${}')
+ return replacements.get(m.lower(), match.group(0))
+
+ # Replace $var or ${var}.
+ return re.sub(r'(\$\w+\b|\$\{\w+\})', repl, path)
+
+
+def canonicalize_path(path):
+ """Substitute config vars, expand user home, take abspath."""
+ path = substitute_config_variables(path)
+ path = os.path.expanduser(path)
+ path = os.path.abspath(path)
+ return path
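
A quick behavioral check of the substitution scheme in the new path.py, standalone so it runs without a Spack prefix; the replacement values below are illustrative stand-ins for spack.prefix, getpass.getuser() and tempfile.gettempdir().

import re

replacements = {
    'spack': '/opt/spack',   # stand-in for spack.prefix
    'user': 'alice',         # stand-in for getpass.getuser()
    'tempdir': '/tmp',       # stand-in for tempfile.gettempdir()
}


def substitute(path):
    # Same regex and replacement lookup as substitute_config_variables().
    def repl(match):
        m = match.group(0).strip('${}')
        return replacements.get(m.lower(), match.group(0))
    return re.sub(r'(\$\w+\b|\$\{\w+\})', repl, path)


assert substitute('$spack/var/spack/stage') == '/opt/spack/var/spack/stage'
assert substitute('$tempdir/${user}/stage') == '/tmp/alice/stage'
assert substitute('$unknown/stays') == '$unknown/stays'
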
diff --git a/lib/spack/spack/util/pattern.py b/lib/spack/spack/util/pattern.py
index 6d4bcb1039..b5731ccf08 100644
--- a/lib/spack/spack/util/pattern.py
+++ b/lib/spack/spack/util/pattern.py
@@ -28,42 +28,52 @@ import functools
def composite(interface=None, method_list=None, container=list):
- """
- Returns a class decorator that patches a class adding all the methods it needs to be a composite for a given
- interface.
+ """Returns a class decorator that patches a class adding all the methods
+ it needs to be a composite for a given interface.
- :param interface: class exposing the interface to which the composite object must conform. Only non-private and
- non-special methods will be taken into account
+ :param interface: class exposing the interface to which the composite \
+ object must conform. Only non-private and non-special methods will \
+ be taken into account
:param method_list: names of methods that should be part of the composite
- :param container: container for the composite object (default = list). Must fulfill the MutableSequence contract.
- The composite class will expose the container API to manage object composition
+ :param container: container for the composite object (default = list). \
+ Must fulfill the MutableSequence contract. The composite class will \
+ expose the container API to manage object composition
:return: class decorator
"""
- # Check if container fulfills the MutableSequence contract and raise an exception if it doesn't
- # The patched class returned by the decorator will inherit from the container class to expose the
- # interface needed to manage objects composition
+ # Check if container fulfills the MutableSequence contract and raise an
+ # exception if it doesn't. The patched class returned by the decorator will
+ # inherit from the container class to expose the interface needed to manage
+ # object composition
if not issubclass(container, collections.MutableSequence):
raise TypeError("Container must fulfill the MutableSequence contract")
- # Check if at least one of the 'interface' or the 'method_list' arguments are defined
+ # Check if at least one of the 'interface' or the 'method_list' arguments
+ # are defined
if interface is None and method_list is None:
- raise TypeError("Either 'interface' or 'method_list' must be defined on a call to composite")
+ raise TypeError(
+ "Either 'interface' or 'method_list' must be defined on a call "
+ "to composite")
def cls_decorator(cls):
- # Retrieve the base class of the composite. Inspect its methods and decide which ones will be overridden
+ # Retrieve the base class of the composite. Inspect its methods and
+ # decide which ones will be overridden
def no_special_no_private(x):
return inspect.ismethod(x) and not x.__name__.startswith('_')
- # Patch the behavior of each of the methods in the previous list. This is done associating an instance of the
- # descriptor below to any method that needs to be patched.
+ # Patch the behavior of each of the methods in the previous list.
+ # This is done associating an instance of the descriptor below to
+ # any method that needs to be patched.
class IterateOver(object):
+ """Decorator used to patch methods in a composite.
+
+ It iterates over all the items in the instance containing the
+ associated attribute and calls, on each of them, the attribute
+ with the same name.
"""
- Decorator used to patch methods in a composite. It iterates over all the items in the instance containing the
- associated attribute and calls for each of them an attribute with the same name
- """
+
def __init__(self, name, func=None):
self.name = name
self.func = func
@@ -72,8 +82,9 @@ def composite(interface=None, method_list=None, container=list):
def getter(*args, **kwargs):
for item in instance:
getattr(item, self.name)(*args, **kwargs)
- # If we are using this descriptor to wrap a method from an interface, then we must conditionally
- # use the `functools.wraps` decorator to set the appropriate fields.
+ # If we are using this descriptor to wrap a method from an
+ # interface, then we must conditionally use the
+ # `functools.wraps` decorator to set the appropriate fields
if self.func is not None:
getter = functools.wraps(self.func)(getter)
return getter
@@ -81,7 +92,8 @@ def composite(interface=None, method_list=None, container=list):
dictionary_for_type_call = {}
# Construct a dictionary with the methods explicitly passed as name
if method_list is not None:
- # python@2.7: method_list_dict = {name: IterateOver(name) for name in method_list}
+ # python@2.7: method_list_dict = {name: IterateOver(name) for name
+ # in method_list}
method_list_dict = {}
for name in method_list:
method_list_dict[name] = IterateOver(name)
@@ -89,28 +101,48 @@ def composite(interface=None, method_list=None, container=list):
# Construct a dictionary with the methods inspected from the interface
if interface is not None:
##########
- # python@2.7: interface_methods = {name: method for name, method in inspect.getmembers(interface, predicate=no_special_no_private)}
+ # python@2.7: interface_methods = {name: method for name, method in
+ # inspect.getmembers(interface, predicate=no_special_no_private)}
interface_methods = {}
- for name, method in inspect.getmembers(interface, predicate=no_special_no_private):
+ for name, method in inspect.getmembers(
+ interface, predicate=no_special_no_private):
interface_methods[name] = method
##########
- # python@2.7: interface_methods_dict = {name: IterateOver(name, method) for name, method in interface_methods.iteritems()}
+ # python@2.7: interface_methods_dict = {name: IterateOver(name,
+ # method) for name, method in interface_methods.iteritems()}
interface_methods_dict = {}
for name, method in interface_methods.iteritems():
interface_methods_dict[name] = IterateOver(name, method)
##########
dictionary_for_type_call.update(interface_methods_dict)
- # Get the methods that are defined in the scope of the composite class and override any previous definition
+ # Get the methods that are defined in the scope of the composite
+ # class and override any previous definition
##########
- # python@2.7: cls_method = {name: method for name, method in inspect.getmembers(cls, predicate=inspect.ismethod)}
+ # python@2.7: cls_method = {name: method for name, method in
+ # inspect.getmembers(cls, predicate=inspect.ismethod)}
cls_method = {}
- for name, method in inspect.getmembers(cls, predicate=inspect.ismethod):
+ for name, method in inspect.getmembers(
+ cls, predicate=inspect.ismethod):
cls_method[name] = method
##########
dictionary_for_type_call.update(cls_method)
# Generate the new class on the fly and return it
# FIXME : inherit from interface if we start to use ABC classes?
- wrapper_class = type(cls.__name__, (cls, container), dictionary_for_type_call)
+ wrapper_class = type(cls.__name__, (cls, container),
+ dictionary_for_type_call)
return wrapper_class
return cls_decorator
+
+
+class Bunch(object):
+ """Carries a bunch of named attributes (from Alex Martelli bunch)"""
+
+ def __init__(self, **kwargs):
+ self.__dict__.update(kwargs)
+
+
+class Args(Bunch):
+ """Subclass of Bunch to write argparse args more naturally."""
+ def __init__(self, *flags, **kwargs):
+ super(Args, self).__init__(flags=tuple(flags), kwargs=kwargs)
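For orientation, a minimal usage sketch of the composite decorator and the new Args helper (assuming they live in llnl.util.lang, which is not named in this hunk; the One/CompositeOne classes and the argparse flags below are made up for illustration):

    # Sketch only: exercise composite() and Args as defined in the hunk above.
    from llnl.util.lang import composite, Args

    class One(object):
        def do(self):
            print('doing one thing')

    @composite(method_list=['do'])
    class CompositeOne(object):
        pass

    group = CompositeOne()   # the generated class also inherits from `list`
    group.append(One())
    group.append(One())
    group.do()               # the IterateOver descriptor calls do() on every item

    # Args bundles argparse flags and keyword arguments into one object:
    opt = Args('-v', '--verbose', action='store_true', help='verbose output')
    # opt.flags == ('-v', '--verbose'); opt.kwargs == {'action': ..., 'help': ...}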
diff --git a/lib/spack/spack/util/prefix.py b/lib/spack/spack/util/prefix.py
index e1a0f2958b..985d862269 100644
--- a/lib/spack/spack/util/prefix.py
+++ b/lib/spack/spack/util/prefix.py
@@ -27,6 +27,7 @@ This file contains utilities to help with installing packages.
"""
from llnl.util.filesystem import join_path
+
class Prefix(str):
"""This class represents an installation prefix, but provides useful
attributes for referring to directories inside the prefix.
diff --git a/lib/spack/spack/util/spack_json.py b/lib/spack/spack/util/spack_json.py
new file mode 100644
index 0000000000..236eef8983
--- /dev/null
+++ b/lib/spack/spack/util/spack_json.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+"""Simple wrapper around JSON to guarantee consistent use of load/dump. """
+import json
+import spack.error
+
+__all__ = ['load', 'dump', 'SpackJSONError']
+
+_json_dump_args = {
+ 'indent': True,
+ 'separators': (',', ': ')
+}
+
+
+def load(stream):
+ """Spack JSON needs to be ordered to support specs."""
+ if isinstance(stream, basestring):
+ return _byteify(json.loads(stream, object_hook=_byteify),
+ ignore_dicts=True)
+ else:
+ return _byteify(json.load(stream, object_hook=_byteify),
+ ignore_dicts=True)
+
+
+def dump(data, stream=None):
+ """Dump JSON with a reasonable amount of indentation and separation."""
+ if stream is None:
+ return json.dumps(data, **_json_dump_args)
+ else:
+ return json.dump(data, stream, **_json_dump_args)
+
+
+def _byteify(data, ignore_dicts=False):
+ # if this is a unicode string, return its string representation
+ if isinstance(data, unicode):
+ return data.encode('utf-8')
+ # if this is a list of values, return list of byteified values
+ if isinstance(data, list):
+ return [_byteify(item, ignore_dicts=True) for item in data]
+ # if this is a dictionary, return dictionary of byteified keys and values
+ # but only if we haven't already byteified it
+ if isinstance(data, dict) and not ignore_dicts:
+ return dict((_byteify(key, ignore_dicts=True),
+ _byteify(value, ignore_dicts=True)) for key, value in
+ data.iteritems())
+ # if it's anything else, return it in its original form
+ return data
+
+
+class SpackJSONError(spack.error.SpackError):
+ """Raised when there are issues with JSON parsing."""
+
+ def __init__(self, msg, yaml_error):
+ super(SpackJSONError, self).__init__(msg, str(yaml_error))
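A short Python 2 sketch of how the new spack_json wrapper is meant to be used; the sample dictionary is invented for illustration:

    # Sketch only: round-trip data through the load/dump helpers defined above.
    import spack.util.spack_json as sjson

    data = {'version': '1.2.3', 'deps': ['mpi', 'zlib']}   # made-up sample

    text = sjson.dump(data)      # indented JSON string (stream=None returns a string)
    loaded = sjson.load(text)    # strings come back as byte strings, not unicode

    assert loaded['version'] == '1.2.3'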
diff --git a/lib/spack/spack/util/spack_yaml.py b/lib/spack/spack/util/spack_yaml.py
index 909f9a57a8..9d4c607908 100644
--- a/lib/spack/spack/util/spack_yaml.py
+++ b/lib/spack/spack/util/spack_yaml.py
@@ -32,32 +32,41 @@
"""
import yaml
+from yaml import Loader, Dumper
from yaml.nodes import *
from yaml.constructor import ConstructorError
-from yaml.representer import SafeRepresenter
from ordereddict_backport import OrderedDict
+import spack.error
+
# Only export load and dump
-__all__ = ['load', 'dump']
+__all__ = ['load', 'dump', 'SpackYAMLError']
# Make new classes so we can add custom attributes.
# Also, use OrderedDict instead of just dict.
+
+
class syaml_dict(OrderedDict):
def __repr__(self):
- mappings = ('%r: %r' % (k,v) for k,v in self.items())
+ mappings = ('%r: %r' % (k, v) for k, v in self.items())
return '{%s}' % ', '.join(mappings)
+
+
class syaml_list(list):
__repr__ = list.__repr__
+
+
class syaml_str(str):
__repr__ = str.__repr__
+
def mark(obj, node):
"""Add start and end markers to an object."""
obj._start_mark = node.start_mark
obj._end_mark = node.end_mark
-class OrderedLineLoader(yaml.Loader):
+class OrderedLineLoader(Loader):
"""YAML loader that preserves order and line numbers.
Mappings read in by this loader behave like an ordered dict.
@@ -73,24 +82,25 @@ class OrderedLineLoader(yaml.Loader):
# The standard YAML constructors return empty instances and fill
# in with mappings later. We preserve this behavior.
#
+
def construct_yaml_str(self, node):
value = self.construct_scalar(node)
try:
value = value.encode('ascii')
except UnicodeEncodeError:
pass
+
value = syaml_str(value)
+
mark(value, node)
return value
-
def construct_yaml_seq(self, node):
data = syaml_list()
mark(data, node)
yield data
data.extend(self.construct_sequence(node))
-
def construct_yaml_map(self, node):
data = syaml_dict()
mark(data, node)
@@ -104,22 +114,23 @@ class OrderedLineLoader(yaml.Loader):
#
def construct_sequence(self, node, deep=False):
if not isinstance(node, SequenceNode):
- raise ConstructorError(None, None,
- "expected a sequence node, but found %s" % node.id,
- node.start_mark)
- value = syaml_list(self.construct_object(child, deep=deep)
- for child in node.value)
+ raise ConstructorError(
+ None, None,
+ "expected a sequence node, but found %s" % node.id,
+ node.start_mark)
+ value = syaml_list(self.construct_object(child, deep=deep)
+ for child in node.value)
mark(value, node)
return value
-
def construct_mapping(self, node, deep=False):
"""Store mappings as OrderedDicts instead of as regular python
dictionaries to preserve file ordering."""
if not isinstance(node, MappingNode):
- raise ConstructorError(None, None,
- "expected a mapping node, but found %s" % node.id,
- node.start_mark)
+ raise ConstructorError(
+ None, None,
+ "expected a mapping node, but found %s" % node.id,
+ node.start_mark)
mapping = syaml_dict()
for key_node, value_node in node.value:
@@ -127,25 +138,30 @@ class OrderedLineLoader(yaml.Loader):
try:
hash(key)
except TypeError, exc:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found unacceptable key (%s)" % exc, key_node.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping", node.start_mark,
+ "found unacceptable key (%s)" % exc, key_node.start_mark)
value = self.construct_object(value_node, deep=deep)
if key in mapping:
- raise ConstructorError("while constructing a mapping", node.start_mark,
- "found already in-use key (%s)" % key, key_node.start_mark)
+ raise ConstructorError(
+ "while constructing a mapping", node.start_mark,
+ "found already in-use key (%s)" % key, key_node.start_mark)
mapping[key] = value
mark(mapping, node)
return mapping
-# register above new constructors
-OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
-OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
-OrderedLineLoader.add_constructor(u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
+# register above new constructors
+OrderedLineLoader.add_constructor(
+ u'tag:yaml.org,2002:map', OrderedLineLoader.construct_yaml_map)
+OrderedLineLoader.add_constructor(
+ u'tag:yaml.org,2002:seq', OrderedLineLoader.construct_yaml_seq)
+OrderedLineLoader.add_constructor(
+ u'tag:yaml.org,2002:str', OrderedLineLoader.construct_yaml_str)
-class OrderedLineDumper(yaml.Dumper):
+class OrderedLineDumper(Dumper):
"""Dumper that preserves ordering and formats ``syaml_*`` objects.
This dumper preserves insertion ordering ``syaml_dict`` objects
@@ -154,6 +170,7 @@ class OrderedLineDumper(yaml.Dumper):
regular Python equivalents, instead of ugly YAML pyobjects.
"""
+
def represent_mapping(self, tag, mapping, flow_style=None):
value = []
node = MappingNode(tag, value, flow_style=flow_style)
@@ -173,7 +190,8 @@ class OrderedLineDumper(yaml.Dumper):
node_value = self.represent_data(item_value)
if not (isinstance(node_key, ScalarNode) and not node_key.style):
best_style = False
- if not (isinstance(node_value, ScalarNode) and not node_value.style):
+ if not (isinstance(node_value, ScalarNode) and
+ not node_value.style):
best_style = False
value.append((node_key, node_value))
if flow_style is None:
@@ -183,6 +201,11 @@ class OrderedLineDumper(yaml.Dumper):
node.flow_style = best_style
return node
+ def ignore_aliases(self, _data):
+ """Make the dumper NEVER print YAML aliases."""
+ return True
+
+
# Make our special objects look like normal YAML ones.
OrderedLineDumper.add_representer(syaml_dict, OrderedLineDumper.represent_dict)
OrderedLineDumper.add_representer(syaml_list, OrderedLineDumper.represent_list)
@@ -199,3 +222,9 @@ def load(*args, **kwargs):
def dump(*args, **kwargs):
kwargs['Dumper'] = OrderedLineDumper
return yaml.dump(*args, **kwargs)
+
+
+class SpackYAMLError(spack.error.SpackError):
+ """Raised when there are issues with YAML parsing."""
+ def __init__(self, msg, yaml_error):
+ super(SpackYAMLError, self).__init__(msg, str(yaml_error))
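A brief sketch of what the reworked loader and dumper provide: ordered mappings, line marks on loaded objects, and alias-free output (the YAML text below is invented for illustration):

    # Sketch only: load/dump through OrderedLineLoader/OrderedLineDumper above.
    import spack.util.spack_yaml as syaml

    text = "packages:\n  mpich:\n    version: [3.0.4]\n"

    data = syaml.load(text)                  # syaml_dict preserves key order
    print(list(data['packages'].keys()))     # ['mpich']
    print(data._start_mark.line)             # line marks added by mark()

    out = syaml.dump(data)                   # OrderedLineDumper output, never uses aliases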
diff --git a/lib/spack/spack/util/string.py b/lib/spack/spack/util/string.py
index ce017b8126..dae7afbf46 100644
--- a/lib/spack/spack/util/string.py
+++ b/lib/spack/spack/util/string.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+
def comma_list(sequence, article=''):
if type(sequence) != list:
sequence = list(sequence)
@@ -32,7 +33,7 @@ def comma_list(sequence, article=''):
elif len(sequence) == 1:
return sequence[0]
else:
- out = ', '.join(str(s) for s in sequence[:-1])
+ out = ', '.join(str(s) for s in sequence[:-1])
if len(sequence) != 2:
out += ',' # oxford comma
out += ' '
@@ -41,6 +42,7 @@ def comma_list(sequence, article=''):
out += str(sequence[-1])
return out
+
def comma_or(sequence):
return comma_list(sequence, 'or')
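For reference, the oxford-comma behavior of these helpers (outputs inferred from the logic above; the article-appending line itself sits outside this hunk):

    # Sketch only: expected outputs of the helpers in spack.util.string.
    from spack.util.string import comma_or

    print(comma_or(['a']))            # a
    print(comma_or(['a', 'b']))       # a or b
    print(comma_or(['a', 'b', 'c']))  # a, b, or c   (oxford comma for 3+ items)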
diff --git a/lib/spack/spack/util/web.py b/lib/spack/spack/util/web.py
index 47abc507e0..935532266f 100644
--- a/lib/spack/spack/util/web.py
+++ b/lib/spack/spack/util/web.py
@@ -25,8 +25,7 @@
import re
import os
import sys
-import subprocess
-import urllib2, cookielib
+import urllib2
import urlparse
from multiprocessing import Pool
from HTMLParser import HTMLParser, HTMLParseError
@@ -44,6 +43,7 @@ TIMEOUT = 10
class LinkParser(HTMLParser):
"""This parser just takes an HTML page and strips out the hrefs on the
links. Good enough for a really simple spider. """
+
def __init__(self):
HTMLParser.__init__(self)
self.links = []
@@ -84,7 +84,7 @@ def _spider(args):
req.get_method = lambda: "HEAD"
resp = urllib2.urlopen(req, timeout=TIMEOUT)
- if not "Content-type" in resp.headers:
+ if "Content-type" not in resp.headers:
tty.debug("ignoring page " + url)
return pages, links
@@ -109,7 +109,7 @@ def _spider(args):
while link_parser.links:
raw_link = link_parser.links.pop()
- abs_link = urlparse.urljoin(response_url, raw_link)
+ abs_link = urlparse.urljoin(response_url, raw_link.strip())
links.add(abs_link)
@@ -125,11 +125,11 @@ def _spider(args):
if abs_link in visited:
continue
- # If we're not at max depth, follow links.
- if depth < max_depth:
- subcalls.append((abs_link, visited, root, None,
- depth+1, max_depth, raise_on_error))
- visited.add(abs_link)
+ # If we're not at max depth, follow links.
+ if depth < max_depth:
+ subcalls.append((abs_link, visited, root, None,
+ depth + 1, max_depth, raise_on_error))
+ visited.add(abs_link)
if subcalls:
try:
@@ -142,22 +142,22 @@ def _spider(args):
pool.terminate()
pool.join()
- except urllib2.URLError, e:
+ except urllib2.URLError as e:
tty.debug(e)
if raise_on_error:
raise spack.error.NoNetworkConnectionError(str(e), url)
- except HTMLParseError, e:
+ except HTMLParseError as e:
# This error indicates that Python's HTML parser sucks.
msg = "Got an error parsing HTML."
# Pre-2.7.3 Pythons in particular have rather prickly HTML parsing.
- if sys.version_info[:3] < (2,7,3):
+ if sys.version_info[:3] < (2, 7, 3):
msg += " Use Python 2.7.3 or newer for better HTML parsing."
tty.warn(msg, url, "HTMLParseError: " + str(e))
- except Exception, e:
+ except Exception as e:
# Other types of errors are completely ignored, except in debug mode.
tty.debug("Error in _spider: %s" % e)
@@ -173,7 +173,8 @@ def spider(root_url, **kwargs):
performance over a sequential fetch.
"""
max_depth = kwargs.setdefault('depth', 1)
- pages, links = _spider((root_url, set(), root_url, None, 1, max_depth, False))
+ pages, links = _spider((root_url, set(), root_url, None,
+ 1, max_depth, False))
return pages, links
@@ -226,7 +227,16 @@ def find_versions_of_archive(*archive_urls, **kwargs):
# We'll be a bit more liberal and just look for the archive
# part, not the full path.
- regexes.append(os.path.basename(url_regex))
+ url_regex = os.path.basename(url_regex)
+
+ # We need to add a $ anchor to the end of the regex to prevent
+ # Spack from picking up signature files like:
+ # .asc
+ # .md5
+ # .sha256
+ # .sig
+ # However, SourceForge downloads still need to end in '/download'.
+ regexes.append(url_regex + '(\/download)?$')
# Build a dict version -> URL from any links that match the wildcards.
versions = {}
@@ -235,7 +245,7 @@ def find_versions_of_archive(*archive_urls, **kwargs):
try:
ver = spack.url.parse_version(url)
versions[ver] = url
- except spack.url.UndetectableVersionError as e:
+ except spack.url.UndetectableVersionError:
continue
return versions
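A small standalone check of the anchoring described in the comment above: signature files no longer match, while SourceForge-style '/download' URLs still do (the url_regex below is a made-up stand-in for what spack.url produces):

    # Sketch only: effect of appending '(\/download)?$' to the archive regex.
    import re

    url_regex = r'foo-\d+\.\d+\.\d+\.tar\.gz'      # hypothetical archive regex
    anchored = url_regex + '(\/download)?$'

    print(bool(re.search(anchored, 'foo-1.2.3.tar.gz')))           # True
    print(bool(re.search(anchored, 'foo-1.2.3.tar.gz/download')))  # True  (SourceForge)
    print(bool(re.search(anchored, 'foo-1.2.3.tar.gz.asc')))       # False (signature)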
diff --git a/lib/spack/spack/variant.py b/lib/spack/spack/variant.py
index 20686d44b2..b2c1a73489 100644
--- a/lib/spack/spack/variant.py
+++ b/lib/spack/spack/variant.py
@@ -29,8 +29,10 @@ currently variants are just flags.
"""
+
class Variant(object):
"""Represents a variant on a build. Can be either on or off."""
+
def __init__(self, default, description):
- self.default = bool(default)
+ self.default = default
self.description = str(description)
diff --git a/lib/spack/spack/version.py b/lib/spack/spack/version.py
index 247f6d2362..0d68a709e8 100644
--- a/lib/spack/spack/version.py
+++ b/lib/spack/spack/version.py
@@ -43,16 +43,20 @@ be called on any of the types::
intersection
concrete
"""
-import os
-import sys
import re
+import numbers
from bisect import bisect_left
from functools import wraps
+
from functools_backport import total_ordering
+from spack.util.spack_yaml import syaml_dict
+
+__all__ = ['Version', 'VersionRange', 'VersionList', 'ver']
# Valid version characters
VALID_VERSION = r'[A-Za-z0-9_.-]'
+
def int_if_int(string):
"""Convert a string to int if possible. Otherwise, return a string."""
try:
@@ -62,10 +66,11 @@ def int_if_int(string):
def coerce_versions(a, b):
- """Convert both a and b to the 'greatest' type between them, in this order:
+ """
+ Convert both a and b to the 'greatest' type between them, in this order:
Version < VersionRange < VersionList
- This is used to simplify comparison operations below so that we're always
- comparing things that are of the same type.
+ This is used to simplify comparison operations below so that we're always
+ comparing things that are of the same type.
"""
order = (Version, VersionRange, VersionList)
ta, tb = type(a), type(b)
@@ -102,9 +107,14 @@ def coerced(method):
return coercing_method
+def _numeric_lt(self0, other):
+ """Compares two versions, knowing they're both numeric"""
+
+
@total_ordering
class Version(object):
"""Class to represent versions"""
+
def __init__(self, string):
string = str(string)
@@ -124,6 +134,21 @@ class Version(object):
# last element of separators is ''
self.separators = tuple(re.split(segment_regex, string)[1:-1])
+ @property
+ def dotted(self):
+ return '.'.join(str(x) for x in self.version)
+
+ @property
+ def underscored(self):
+ return '_'.join(str(x) for x in self.version)
+
+ @property
+ def dashed(self):
+ return '-'.join(str(x) for x in self.version)
+
+ @property
+ def joined(self):
+ return ''.join(str(x) for x in self.version)
def up_to(self, index):
"""Return a version string up to the specified component, exclusive.
@@ -131,27 +156,45 @@ class Version(object):
"""
return '.'.join(str(x) for x in self[:index])
-
def lowest(self):
return self
-
def highest(self):
return self
+ def isnumeric(self):
+ """Tells if this version is numeric (vs. a non-numeric version). A
+ version will be numeric as long as the first section of it is,
+        even if it contains non-numeric portions.
+
+ Some numeric versions:
+ 1
+ 1.1
+ 1.1a
+ 1.a.1b
+ Some non-numeric versions:
+ develop
+ system
+ myfavoritebranch
+ """
+ return isinstance(self.version[0], numbers.Integral)
+
+ def isdevelop(self):
+ """Triggers on the special case of the `@develop` version."""
+ return self.string == 'develop'
@coerced
def satisfies(self, other):
- """A Version 'satisfies' another if it is at least as specific and has a
- common prefix. e.g., we want gcc@4.7.3 to satisfy a request for
- gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
- a suitable compiler.
+ """A Version 'satisfies' another if it is at least as specific and has
+ a common prefix. e.g., we want gcc@4.7.3 to satisfy a request for
+ gcc@4.7 so that when a user asks to build with gcc@4.7, we can find
+ a suitable compiler.
"""
- nself = len(self.version)
+
+ nself = len(self.version)
nother = len(other.version)
return nother <= nself and self.version[:nother] == other.version
-
def wildcard(self):
"""Create a regex that will match variants of this version string."""
def a_or_n(seg):
@@ -181,27 +224,56 @@ class Version(object):
wc += '(?:[a-z]|alpha|beta)?)?' * (len(segments) - 1)
return wc
-
def __iter__(self):
return iter(self.version)
-
def __getitem__(self, idx):
- return tuple(self.version[idx])
-
+ cls = type(self)
+ if isinstance(idx, numbers.Integral):
+ return self.version[idx]
+ elif isinstance(idx, slice):
+ # Currently len(self.separators) == len(self.version) - 1
+            extended_separators = self.separators + ('',)
+            string_arg = []
+            for token, sep in zip(self.version, extended_separators)[idx]:
+ string_arg.append(str(token))
+ string_arg.append(str(sep))
+ string_arg.pop() # We don't need the last separator
+ string_arg = ''.join(string_arg)
+ return cls(string_arg)
+ message = '{cls.__name__} indices must be integers'
+ raise TypeError(message.format(cls=cls))
def __repr__(self):
- return self.string
-
+ return 'Version(' + repr(self.string) + ')'
def __str__(self):
return self.string
-
@property
def concrete(self):
return self
+ def _numeric_lt(self, other):
+ """Compares two versions, knowing they're both numeric"""
+ # Standard comparison of two numeric versions
+ for a, b in zip(self.version, other.version):
+ if a == b:
+ continue
+ else:
+ # Numbers are always "newer" than letters.
+ # This is for consistency with RPM. See patch
+ # #60884 (and details) from bugzilla #50977 in
+ # the RPM project at rpm.org. Or look at
+ # rpmvercmp.c if you want to see how this is
+ # implemented there.
+ if type(a) != type(b):
+ return type(b) == int
+ else:
+ return a < b
+ # If the common prefix is equal, the one
+ # with more segments is bigger.
+ return len(self.version) < len(other.version)
@coerced
def __lt__(self, other):
@@ -218,45 +290,51 @@ class Version(object):
if self.version == other.version:
return False
- for a, b in zip(self.version, other.version):
- if a == b:
- continue
- else:
- # Numbers are always "newer" than letters. This is for
- # consistency with RPM. See patch #60884 (and details)
- # from bugzilla #50977 in the RPM project at rpm.org.
- # Or look at rpmvercmp.c if you want to see how this is
- # implemented there.
- if type(a) != type(b):
- return type(b) == int
- else:
- return a < b
-
- # If the common prefix is equal, the one with more segments is bigger.
- return len(self.version) < len(other.version)
-
+ # First priority: anything < develop
+ sdev = self.isdevelop()
+ if sdev:
+ return False # source = develop, it can't be < anything
+
+ # Now we know !sdev
+ odev = other.isdevelop()
+ if odev:
+ return True # src < dst
+
+ # now we know neither self nor other isdevelop().
+
+ # Principle: Non-numeric is less than numeric
+ # (so numeric will always be preferred by default)
+ if self.isnumeric():
+ if other.isnumeric():
+ return self._numeric_lt(other)
+ else: # self = numeric; other = non-numeric
+ # Numeric > Non-numeric (always)
+ return False
+ else:
+ if other.isnumeric(): # self = non-numeric, other = numeric
+ # non-numeric < numeric (always)
+ return True
+ else: # Both non-numeric
+ # Maybe consider other ways to compare here...
+ return self.string < other.string
@coerced
def __eq__(self, other):
return (other is not None and
type(other) == Version and self.version == other.version)
-
def __ne__(self, other):
return not (self == other)
-
def __hash__(self):
return hash(self.version)
-
@coerced
def __contains__(self, other):
if other is None:
return False
return other.version[:len(self.version)] == self.version
-
def is_predecessor(self, other):
"""True if the other version is the immediate predecessor of this one.
That is, NO versions v exist such that:
@@ -269,16 +347,13 @@ class Version(object):
ol = other.version[-1]
return type(sl) == int and type(ol) == int and (ol - sl == 1)
-
def is_successor(self, other):
return other.is_predecessor(self)
-
@coerced
def overlaps(self, other):
return self in other or other in self
-
@coerced
def union(self, other):
if self == other or other in self:
@@ -288,7 +363,6 @@ class Version(object):
else:
return VersionList([self, other])
-
@coerced
def intersection(self, other):
if self == other:
@@ -299,6 +373,7 @@ class Version(object):
@total_ordering
class VersionRange(object):
+
def __init__(self, start, end):
if isinstance(start, basestring):
start = Version(start)
@@ -310,15 +385,12 @@ class VersionRange(object):
if start and end and end < start:
raise ValueError("Invalid Version range: %s" % self)
-
def lowest(self):
return self.start
-
def highest(self):
return self.end
-
@coerced
def __lt__(self, other):
"""Sort VersionRanges lexicographically so that they are ordered first
@@ -331,28 +403,24 @@ class VersionRange(object):
s, o = self, other
if s.start != o.start:
- return s.start is None or (o.start is not None and s.start < o.start)
-
+ return s.start is None or (
+ o.start is not None and s.start < o.start)
return (s.end != o.end and
o.end is None or (s.end is not None and s.end < o.end))
-
@coerced
def __eq__(self, other):
return (other is not None and
type(other) == VersionRange and
self.start == other.start and self.end == other.end)
-
def __ne__(self, other):
return not (self == other)
-
@property
def concrete(self):
return self.start if self.start == self.end else None
-
@coerced
def __contains__(self, other):
if other is None:
@@ -373,57 +441,56 @@ class VersionRange(object):
other.end in self.end)))
return in_upper
-
@coerced
def satisfies(self, other):
"""A VersionRange satisfies another if some version in this range
- would satisfy some version in the other range. To do this it must
- either:
- a) Overlap with the other range
- b) The start of this range satisfies the end of the other range.
-
- This is essentially the same as overlaps(), but overlaps assumes
- that its arguments are specific. That is, 4.7 is interpreted as
- 4.7.0.0.0.0... . This funciton assumes that 4.7 woudl be satisfied
- by 4.7.3.5, etc.
-
- Rationale:
- If a user asks for gcc@4.5:4.7, and a package is only compatible with
- gcc@4.7.3:4.8, then that package should be able to build under the
- constraints. Just using overlaps() would not work here.
-
- Note that we don't need to check whether the end of this range
- would satisfy the start of the other range, because overlaps()
- already covers that case.
-
- Note further that overlaps() is a symmetric operation, while
- satisfies() is not.
+ would satisfy some version in the other range. To do this it must
+ either:
+
+ a) Overlap with the other range
+ b) The start of this range satisfies the end of the other range.
+
+ This is essentially the same as overlaps(), but overlaps assumes
+ that its arguments are specific. That is, 4.7 is interpreted as
+        4.7.0.0.0.0... . This function assumes that 4.7 would be satisfied
+ by 4.7.3.5, etc.
+
+ Rationale:
+
+ If a user asks for gcc@4.5:4.7, and a package is only compatible with
+ gcc@4.7.3:4.8, then that package should be able to build under the
+ constraints. Just using overlaps() would not work here.
+
+ Note that we don't need to check whether the end of this range
+ would satisfy the start of the other range, because overlaps()
+ already covers that case.
+
+ Note further that overlaps() is a symmetric operation, while
+ satisfies() is not.
"""
return (self.overlaps(other) or
# if either self.start or other.end are None, then this can't
# satisfy, or overlaps() would've taken care of it.
self.start and other.end and self.start.satisfies(other.end))
-
@coerced
def overlaps(self, other):
- return ((self.start == None or other.end is None or
+ return ((self.start is None or other.end is None or
self.start <= other.end or
other.end in self.start or self.start in other.end) and
- (other.start is None or self.end == None or
+ (other.start is None or self.end is None or
other.start <= self.end or
other.start in self.end or self.end in other.start))
-
@coerced
def union(self, other):
if not self.overlaps(other):
if (self.end is not None and other.start is not None and
- self.end.is_predecessor(other.start)):
+ self.end.is_predecessor(other.start)):
return VersionRange(self.start, other.end)
if (other.end is not None and self.start is not None and
- other.end.is_predecessor(self.start)):
+ other.end.is_predecessor(self.start)):
return VersionRange(other.start, self.end)
return VersionList([self, other])
@@ -442,13 +509,12 @@ class VersionRange(object):
else:
end = self.end
# TODO: See note in intersection() about < and in discrepancy.
- if not other.end in self.end:
+ if other.end not in self.end:
if end in other.end or other.end > self.end:
end = other.end
return VersionRange(start, end)
-
@coerced
def intersection(self, other):
if self.overlaps(other):
@@ -470,7 +536,7 @@ class VersionRange(object):
# 1.6 < 1.6.5 = True (lexicographic)
# Should 1.6 NOT be less than 1.6.5? Hm.
# Here we test (not end in other.end) first to avoid paradox.
- if other.end is not None and not end in other.end:
+ if other.end is not None and end not in other.end:
if other.end < end or other.end in end:
end = other.end
@@ -479,15 +545,12 @@ class VersionRange(object):
else:
return VersionList()
-
def __hash__(self):
return hash((self.start, self.end))
-
def __repr__(self):
return self.__str__()
-
def __str__(self):
out = ""
if self.start:
@@ -501,6 +564,7 @@ class VersionRange(object):
@total_ordering
class VersionList(object):
"""Sorted, non-redundant list of Versions and VersionRanges."""
+
def __init__(self, vlist=None):
self.versions = []
if vlist is not None:
@@ -515,7 +579,6 @@ class VersionList(object):
for v in vlist:
self.add(ver(v))
-
def add(self, version):
if type(version) in (Version, VersionRange):
# This normalizes single-value version ranges.
@@ -524,9 +587,9 @@ class VersionList(object):
i = bisect_left(self, version)
- while i-1 >= 0 and version.overlaps(self[i-1]):
- version = version.union(self[i-1])
- del self.versions[i-1]
+ while i - 1 >= 0 and version.overlaps(self[i - 1]):
+ version = version.union(self[i - 1])
+ del self.versions[i - 1]
i -= 1
while i < len(self) and version.overlaps(self[i]):
@@ -542,7 +605,6 @@ class VersionList(object):
else:
raise TypeError("Can't add %s to VersionList" % type(version))
-
@property
def concrete(self):
if len(self) == 1:
@@ -550,11 +612,9 @@ class VersionList(object):
else:
return None
-
def copy(self):
return VersionList(self)
-
def lowest(self):
"""Get the lowest version in the list."""
if not self:
@@ -562,7 +622,6 @@ class VersionList(object):
else:
return self[0].lowest()
-
def highest(self):
"""Get the highest version in the list."""
if not self:
@@ -570,7 +629,6 @@ class VersionList(object):
else:
return self[-1].highest()
-
@coerced
def overlaps(self, other):
if not other or not self:
@@ -586,14 +644,16 @@ class VersionList(object):
o += 1
return False
-
def to_dict(self):
"""Generate human-readable dict for YAML."""
if self.concrete:
- return { 'version' : str(self[0]) }
+ return syaml_dict([
+ ('version', str(self[0]))
+ ])
else:
- return { 'versions' : [str(v) for v in self] }
-
+ return syaml_dict([
+ ('versions', [str(v) for v in self])
+ ])
@staticmethod
def from_dict(dictionary):
@@ -605,7 +665,6 @@ class VersionList(object):
else:
raise ValueError("Dict must have 'version' or 'versions' in it.")
-
@coerced
def satisfies(self, other, strict=False):
"""A VersionList satisfies another if some version in the list
@@ -633,20 +692,17 @@ class VersionList(object):
o += 1
return False
-
@coerced
def update(self, other):
for v in other.versions:
self.add(v)
-
@coerced
def union(self, other):
result = self.copy()
result.update(other)
return result
-
@coerced
def intersection(self, other):
# TODO: make this faster. This is O(n^2).
@@ -656,7 +712,6 @@ class VersionList(object):
result.add(s.intersection(o))
return result
-
@coerced
def intersect(self, other):
"""Intersect this spec's list with other.
@@ -678,50 +733,40 @@ class VersionList(object):
if i == 0:
if version not in self[0]:
return False
- elif all(version not in v for v in self[i-1:]):
+ elif all(version not in v for v in self[i - 1:]):
return False
return True
-
def __getitem__(self, index):
return self.versions[index]
-
def __iter__(self):
return iter(self.versions)
-
def __reversed__(self):
return reversed(self.versions)
-
def __len__(self):
return len(self.versions)
-
@coerced
def __eq__(self, other):
return other is not None and self.versions == other.versions
-
def __ne__(self, other):
return not (self == other)
-
@coerced
def __lt__(self, other):
return other is not None and self.versions < other.versions
-
def __hash__(self):
return hash(tuple(self.versions))
-
def __str__(self):
return ",".join(str(v) for v in self.versions)
-
def __repr__(self):
return str(self.versions)
@@ -730,7 +775,7 @@ def _string_to_version(string):
"""Converts a string to a Version, VersionList, or VersionRange.
This is private. Client code should use ver().
"""
- string = string.replace(' ','')
+ string = string.replace(' ', '')
if ',' in string:
return VersionList(string.split(','))
@@ -738,7 +783,7 @@ def _string_to_version(string):
elif ':' in string:
s, e = string.split(':')
start = Version(s) if s else None
- end = Version(e) if e else None
+ end = Version(e) if e else None
return VersionRange(start, end)
else:
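A quick sketch of the new Version semantics introduced above: '@develop' sorts above every numeric release, numeric releases sort above branch-like (non-numeric) versions, slicing returns a new Version, and the new properties reformat the separators (all values below are illustrative):

    # Sketch only: exercising the reworked Version class.
    from spack.version import Version

    v = Version('1.2.3')
    print(v.dotted)       # 1.2.3
    print(v.underscored)  # 1_2_3
    print(v.dashed)       # 1-2-3
    print(v.joined)       # 123

    print(v[0])           # 1    (integer index returns one component)
    print(v[:2])          # 1.2  (slice returns a new Version)

    print(Version('1.2.3') < Version('develop'))   # True: develop beats any release
    print(Version('mybranch') < Version('1.0'))    # True: non-numeric < numeric
    print(Version('develop').isdevelop())          # True
    print(Version('1.1a').isnumeric())             # True: first component is numeric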
diff --git a/lib/spack/spack/virtual.py b/lib/spack/spack/virtual.py
deleted file mode 100644
index 91ad77c8fd..0000000000
--- a/lib/spack/spack/virtual.py
+++ /dev/null
@@ -1,161 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-"""
-The ``virtual`` module contains utility classes for virtual dependencies.
-"""
-import spack.spec
-import itertools
-
-class ProviderIndex(object):
- """This is a dict of dicts used for finding providers of particular
- virtual dependencies. The dict of dicts looks like:
-
- { vpkg name :
- { full vpkg spec : set(packages providing spec) } }
-
- Callers can use this to first find which packages provide a vpkg,
- then find a matching full spec. e.g., in this scenario:
-
- { 'mpi' :
- { mpi@:1.1 : set([mpich]),
- mpi@:2.3 : set([mpich2@1.9:]) } }
-
- Calling providers_for(spec) will find specs that provide a
- matching implementation of MPI.
- """
- def __init__(self, specs, **kwargs):
- # TODO: come up with another name for this. This "restricts" values to
- # the verbatim impu specs (i.e., it doesn't pre-apply package's constraints, and
- # keeps things as broad as possible, so it's really the wrong name)
- self.restrict = kwargs.setdefault('restrict', False)
-
- self.providers = {}
-
- for spec in specs:
- if not isinstance(spec, spack.spec.Spec):
- spec = spack.spec.Spec(spec)
-
- if spec.virtual:
- continue
-
- self.update(spec)
-
-
- def update(self, spec):
- if type(spec) != spack.spec.Spec:
- spec = spack.spec.Spec(spec)
-
- assert(not spec.virtual)
-
- pkg = spec.package
- for provided_spec, provider_spec in pkg.provided.iteritems():
- if provider_spec.satisfies(spec, deps=False):
- provided_name = provided_spec.name
-
- provider_map = self.providers.setdefault(provided_name, {})
- if not provided_spec in provider_map:
- provider_map[provided_spec] = set()
-
- if self.restrict:
- provider_set = provider_map[provided_spec]
-
- # If this package existed in the index before,
- # need to take the old versions out, as they're
- # now more constrained.
- old = set([s for s in provider_set if s.name == spec.name])
- provider_set.difference_update(old)
-
- # Now add the new version.
- provider_set.add(spec)
-
- else:
- # Before putting the spec in the map, constrain it so that
- # it provides what was asked for.
- constrained = spec.copy()
- constrained.constrain(provider_spec)
- provider_map[provided_spec].add(constrained)
-
-
- def providers_for(self, *vpkg_specs):
- """Gives specs of all packages that provide virtual packages
- with the supplied specs."""
- providers = set()
- for vspec in vpkg_specs:
- # Allow string names to be passed as input, as well as specs
- if type(vspec) == str:
- vspec = spack.spec.Spec(vspec)
-
- # Add all the providers that satisfy the vpkg spec.
- if vspec.name in self.providers:
- for provider_spec, spec_set in self.providers[vspec.name].items():
- if provider_spec.satisfies(vspec, deps=False):
- providers.update(spec_set)
-
- # Return providers in order
- return sorted(providers)
-
-
- # TODO: this is pretty darned nasty, and inefficient, but there
- # are not that many vdeps in most specs.
- def _cross_provider_maps(self, lmap, rmap):
- result = {}
- for lspec, rspec in itertools.product(lmap, rmap):
- try:
- constrained = lspec.constrained(rspec)
- except spack.spec.UnsatisfiableSpecError:
- continue
-
- # lp and rp are left and right provider specs.
- for lp_spec, rp_spec in itertools.product(lmap[lspec], rmap[rspec]):
- if lp_spec.name == rp_spec.name:
- try:
- const = lp_spec.constrained(rp_spec, deps=False)
- result.setdefault(constrained, set()).add(const)
- except spack.spec.UnsatisfiableSpecError:
- continue
- return result
-
-
- def __contains__(self, name):
- """Whether a particular vpkg name is in the index."""
- return name in self.providers
-
-
- def satisfies(self, other):
- """Check that providers of virtual specs are compatible."""
- common = set(self.providers) & set(other.providers)
- if not common:
- return True
-
- # This ensures that some provider in other COULD satisfy the
- # vpkg constraints on self.
- result = {}
- for name in common:
- crossed = self._cross_provider_maps(self.providers[name],
- other.providers[name])
- if crossed:
- result[name] = crossed
-
- return all(c in result for c in common)
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000000..0d8d2b271f
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,5 @@
+# content of pytest.ini
+[pytest]
+addopts = --durations=20 -ra
+testpaths = lib/spack/spack/test
+python_files = *.py \ No newline at end of file
diff --git a/share/spack/csh/spack.csh b/share/spack/csh/spack.csh
index d64ce8935b..5acd190449 100644
--- a/share/spack/csh/spack.csh
+++ b/share/spack/csh/spack.csh
@@ -74,25 +74,25 @@ case unload:
# tool's commands to add/remove the result from the environment.
switch ($_sp_subcommand)
case "use":
- set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" )
+ set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" )
if ( $? == 0 ) then
use $_sp_module_args $_sp_full_spec
endif
breaksw
case "unuse":
- set _sp_full_spec = ( "`\spack $_sp_flags module find dotkit $_sp_spec`" )
+ set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type dotkit $_sp_spec`" )
if ( $? == 0 ) then
unuse $_sp_module_args $_sp_full_spec
endif
breaksw
case "load":
- set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" )
+ set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" )
if ( $? == 0 ) then
module load $_sp_module_args $_sp_full_spec
endif
breaksw
case "unload":
- set _sp_full_spec = ( "`\spack $_sp_flags module find tcl $_sp_spec`" )
+ set _sp_full_spec = ( "`\spack $_sp_flags module find --module-type tcl $_sp_spec`" )
if ( $? == 0 ) then
module unload $_sp_module_args $_sp_full_spec
endif
diff --git a/share/spack/qa/check_dependencies b/share/spack/qa/check_dependencies
new file mode 100755
index 0000000000..e999463b03
--- /dev/null
+++ b/share/spack/qa/check_dependencies
@@ -0,0 +1,96 @@
+#!/usr/bin/env bash
+#
+# Description:
+# Check to see if dependencies are installed.
+# If not, warn the user and tell them how to
+# install these dependencies.
+#
+# Usage:
+# check-deps <dep> ...
+#
+# Options:
+# One or more dependencies. Must use name of binary.
+
+for dep in "$@"; do
+ if ! which $dep &> /dev/null; then
+ # Map binary name to package name
+ case $dep in
+ sphinx-apidoc|sphinx-build)
+ spack_package=py-sphinx
+ pip_package=sphinx
+ ;;
+ coverage)
+ spack_package=py-coverage
+ pip_package=coverage
+ ;;
+ flake8)
+ spack_package=py-flake8
+ pip_package=flake8
+ ;;
+ dot)
+ spack_package=graphviz
+ ;;
+ git)
+ spack_package=git
+ ;;
+ hg)
+ spack_package=mercurial
+ pip_package=mercurial
+ ;;
+ svn)
+ spack_package=subversion
+ ;;
+ *)
+ spack_package=$dep
+ pip_package=$dep
+ ;;
+ esac
+
+ echo "ERROR: $dep is required to run this script."
+ echo
+
+ if [[ $spack_package ]]; then
+ echo "To install with Spack, run:"
+ echo " $ spack install $spack_package"
+ fi
+
+ if [[ $pip_package ]]; then
+ echo "To install with pip, run:"
+ echo " $ pip install $pip_package"
+ fi
+
+ if [[ $spack_package || $pip_package ]]; then
+ echo "Then add the bin directory to your PATH."
+ fi
+
+ exit 1
+ fi
+
+ # Flake8 and Sphinx require setuptools in order to run.
+ # Otherwise, they print out this error message:
+ #
+ # Traceback (most recent call last):
+ # File: "/usr/bin/flake8", line 5, in <module>
+ # from pkg_resources import load_entry_point
+ # ImportError: No module named pkg_resources
+ #
+ # Print a more useful error message if setuptools not found.
+ if [[ $dep == flake8 || $dep == sphinx* ]]; then
+ # Find which Python is being run
+ # Spack-installed packages have a hard-coded shebang
+ python_cmd=$(head -n 1 $(which $dep) | cut -c 3-)
+ # May not have a shebang
+ if [[ $python_cmd != *python* ]]; then
+ python_cmd=python
+ fi
+ # Check if setuptools is in the PYTHONPATH
+ if ! $python_cmd -c "import setuptools" 2> /dev/null; then
+ echo "ERROR: setuptools is required to run $dep."
+ echo "Please add it to your PYTHONPATH."
+
+ exit 1
+ fi
+ fi
+done
+
+echo "Dependencies found."
diff --git a/share/spack/qa/run-doc-tests b/share/spack/qa/run-doc-tests
new file mode 100755
index 0000000000..ca892d7eb4
--- /dev/null
+++ b/share/spack/qa/run-doc-tests
@@ -0,0 +1,41 @@
+#!/usr/bin/env bash
+#
+# Description:
+# Builds Spack documentation and checks for
+# possible syntax errors. Treats warnings as
+# fatal errors.
+#
+# Usage:
+# run-doc-tests
+#
+# Notes:
+# Requires sphinx, graphviz, git, mercurial, and subversion.
+#
+
+QA_DIR="$(dirname "$0")"
+SPACK_ROOT="$QA_DIR/../../.."
+DOC_DIR="$SPACK_ROOT/lib/spack/docs"
+
+# Array of dependencies
+deps=(
+ sphinx-apidoc
+ sphinx-build
+ dot
+ git
+ hg
+ svn
+)
+
+# Check for dependencies
+"$QA_DIR/check_dependencies" "${deps[@]}" || exit 1
+
+# Add Spack to the PATH.
+export PATH="$SPACK_ROOT/bin:$PATH"
+
+# Move to documentation directory
+# Allows script to be run from anywhere
+cd "$DOC_DIR"
+
+# Treat warnings as fatal errors
+make clean --silent
+make SPHINXOPTS=-W JOBS=1
diff --git a/share/spack/qa/run-flake8 b/share/spack/qa/run-flake8
deleted file mode 100755
index 722c7fcba6..0000000000
--- a/share/spack/qa/run-flake8
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/bin/bash
-#
-# This script runs source code style checks on Spack.
-#
-# It should be executed from the top-level directory of the repo,
-# e.g.:
-#
-# share/spack/qa/run-flake8
-#
-# To run it, you'll need to have the Python flake8 installed locally.
-#
-PYTHONPATH=./lib/spack:$PYTHONPATH
-
-flake8="$(which flake8)"
-if [[ ! $flake8 ]]; then
- echo "ERROR: flake8 is required to run this script."
- exit 1
-fi
-
-# Check if changed files are flake8 conformant [framework]
-changed=$(git diff --name-only develop... | grep '.py$')
-
-# Exempt url lines in changed packages from overlong line errors.
-for file in $changed; do
- if [[ $file = *package.py ]]; then
- perl -i~ -pe 's/^(\s*url\s*=.*)$/\1 # NOQA: ignore=E501/' $file;
- fi
-done
-
-return_code=0
-if [[ $changed ]]; then
- echo =======================================================
- echo flake8: running flake8 code checks on spack.
- echo
- echo Modified files:
- echo $changed | perl -pe 's/^/ /;s/ +/\n /g'
- echo =======================================================
- if flake8 --format pylint $changed; then
- echo "Flake8 checks were clean."
- else
- echo "Flake8 found errors."
- return_code=1
- fi
-else
- echo No core framework files modified.
-fi
-
-# Restore original package files after modifying them.
-for file in $changed; do
- if [[ $file = *package.py ]]; then
- mv "${file}~" "${file}"
- fi
-done
-
-exit $return_code
diff --git a/share/spack/qa/run-flake8-tests b/share/spack/qa/run-flake8-tests
new file mode 100755
index 0000000000..83469eeb9d
--- /dev/null
+++ b/share/spack/qa/run-flake8-tests
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+#
+# Description:
+# Runs source code style checks on Spack.
+# See $SPACK_ROOT/.flake8 for a list of
+# approved exceptions.
+#
+# Usage:
+# run-flake8-tests
+#
+# Notes:
+# Requires flake8.
+#
+
+QA_DIR="$(dirname "$0")"
+SPACK_ROOT="$QA_DIR/../../.."
+
+# Array of dependencies
+deps=(
+ flake8
+)
+
+# Check for dependencies
+"$QA_DIR/check_dependencies" "${deps[@]}" || exit 1
+
+# Add Spack to the PATH.
+export PATH="$SPACK_ROOT/bin:$PATH"
+
+exec spack flake8
diff --git a/share/spack/qa/run-unit-tests b/share/spack/qa/run-unit-tests
new file mode 100755
index 0000000000..d2ce9647af
--- /dev/null
+++ b/share/spack/qa/run-unit-tests
@@ -0,0 +1,51 @@
+#!/usr/bin/env bash
+#
+# Description:
+# Runs Spack unit tests.
+#
+# Usage:
+# run-unit-tests [test ...]
+#
+# Options:
+# Optionally add one or more unit tests
+# to only run these tests.
+#
+# Notes:
+# Requires coverage, git, mercurial, and subversion.
+#
+
+QA_DIR="$(dirname "$0")"
+SPACK_ROOT="$QA_DIR/../../.."
+
+# Array of dependencies
+deps=(
+ coverage
+ git
+ hg
+ svn
+)
+
+# Check for dependencies
+"$QA_DIR/check_dependencies" "${deps[@]}" || exit 1
+
+# Add Spack to the PATH.
+export PATH="$SPACK_ROOT/bin:$PATH"
+
+# Move to root directory of Spack
+# Allows script to be run from anywhere
+cd "$SPACK_ROOT"
+
+# Run integration tests
+# TODO: should these be separated into a different test suite?
+source "$SPACK_ROOT/share/spack/setup-env.sh"
+spack compilers
+spack config get compilers
+
+# Run unit tests with code coverage
+if [[ "$TRAVIS_PYTHON_VERSION" == 2.7 ]]; then
+ coverage run bin/spack install -v libdwarf
+ coverage run bin/spack test "$@" && coverage combine
+else
+ spack install -v libdwarf
+ spack test "$@"
+fi
diff --git a/share/spack/setup-env.sh b/share/spack/setup-env.sh
index 8aa259cf15..943db72612 100755
--- a/share/spack/setup-env.sh
+++ b/share/spack/setup-env.sh
@@ -57,6 +57,11 @@
########################################################################
function spack {
+    # Zsh does not do word splitting by default; this enables it for this function only
+ if [ -n "${ZSH_VERSION:-}" ]; then
+ emulate -L sh
+ fi
+
# save raw arguments into an array before butchering them
args=( "$@" )
@@ -93,11 +98,18 @@ function spack {
;;
"use"|"unuse"|"load"|"unload")
# Shift any other args for use off before parsing spec.
+ _sp_subcommand_args=""
_sp_module_args=""
- if [[ "$1" =~ ^- ]]; then
- _sp_module_args="$1"; shift
- _sp_spec="$@"
- fi
+ while [[ "$1" =~ ^- ]]; do
+ if [ "$1" = "-r" -o "$1" = "--dependencies" ]; then
+ _sp_subcommand_args="$_sp_subcommand_args $1"
+ else
+ _sp_module_args="$_sp_module_args $1"
+ fi
+ shift
+ done
+
+ _sp_spec="$@"
# Here the user has run use or unuse with a spec. Find a matching
# spec using 'spack module find', then use the appropriate module
@@ -105,19 +117,19 @@ function spack {
# If spack module command comes back with an error, do nothing.
case $_sp_subcommand in
"use")
- if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then
+ if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type dotkit $_sp_spec); then
use $_sp_module_args $_sp_full_spec
fi ;;
"unuse")
- if _sp_full_spec=$(command spack $_sp_flags module find dotkit $_sp_spec); then
+ if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type dotkit $_sp_spec); then
unuse $_sp_module_args $_sp_full_spec
fi ;;
"load")
- if _sp_full_spec=$(command spack $_sp_flags module find tcl $_sp_spec); then
+ if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type tcl $_sp_spec); then
module load $_sp_module_args $_sp_full_spec
fi ;;
"unload")
- if _sp_full_spec=$(command spack $_sp_flags module find tcl $_sp_spec); then
+ if _sp_full_spec=$(command spack $_sp_flags module loads --input-only $_sp_subcommand_args --module-type tcl $_sp_spec); then
module unload $_sp_module_args $_sp_full_spec
fi ;;
esac
@@ -177,5 +189,7 @@ _sp_prefix=$(cd "$(dirname $(dirname $_sp_share_dir))" && pwd)
_spack_pathadd PATH "${_sp_prefix%/}/bin"
_sp_sys_type=$(spack-python -c 'print(spack.architecture.sys_type())')
-_spack_pathadd DK_NODE "${_sp_share_dir%/}/dotkit/$_sp_sys_type"
-_spack_pathadd MODULEPATH "${_sp_share_dir%/}/modules/$_sp_sys_type"
+_sp_dotkit_root=$(spack-python -c "print(spack.util.path.canonicalize_path(spack.config.get_config('config').get('module_roots', {}).get('dotkit')))")
+_sp_tcl_root=$(spack-python -c "print(spack.util.path.canonicalize_path(spack.config.get_config('config').get('module_roots', {}).get('tcl')))")
+_spack_pathadd DK_NODE "${_sp_dotkit_root%/}/$_sp_sys_type"
+_spack_pathadd MODULEPATH "${_sp_tcl_root%/}/$_sp_sys_type"
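Roughly, the two spack-python one-liners above evaluate the following (a sketch built only from the calls that appear in them):

    # Sketch only: resolve the configured module roots, as the one-liners do.
    import spack.config
    import spack.util.path

    roots = spack.config.get_config('config').get('module_roots', {})
    dotkit_root = spack.util.path.canonicalize_path(roots.get('dotkit'))
    tcl_root = spack.util.path.canonicalize_path(roots.get('tcl'))

    print(dotkit_root)
    print(tcl_root)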
diff --git a/var/spack/mock_configs/site_spackconfig/compilers.yaml b/var/spack/mock_configs/site_spackconfig/compilers.yaml
new file mode 100644
index 0000000000..5f8b38007b
--- /dev/null
+++ b/var/spack/mock_configs/site_spackconfig/compilers.yaml
@@ -0,0 +1,40 @@
+compilers:
+ all:
+ clang@3.3:
+ cc: /path/to/clang
+ cxx: /path/to/clang++
+ f77: None
+ fc: None
+ modules: None
+ strategy: PATH
+ gcc@4.5.0:
+ cc: /path/to/gcc
+ cxx: /path/to/g++
+ f77: /path/to/gfortran
+ fc: /path/to/gfortran
+ modules: None
+ strategy: PATH
+ gcc@5.2.0:
+ cc: cc
+ cxx: CC
+ f77: ftn
+ fc: ftn
+ modules:
+ - PrgEnv-gnu
+ - gcc/5.2.0
+ strategy: MODULES
+ intel@15.0.1:
+ cc: cc
+      cxx: CC
+ f77: ftn
+ fc: ftn
+ modules:
+ - PrgEnv-intel
+ - intel/15.0.1
+ strategy: MODULES
+ intel@15.1.2:
+ cc: /path/to/icc
+ cxx: /path/to/ic++
+ f77: /path/to/ifort
+ fc: /path/to/ifort
+ strategy: PATH \ No newline at end of file
diff --git a/var/spack/repos/builtin.mock/packages/a/package.py b/var/spack/repos/builtin.mock/packages/a/package.py
index 40b92240fc..0d75ee1256 100644
--- a/var/spack/repos/builtin.mock/packages/a/package.py
+++ b/var/spack/repos/builtin.mock/packages/a/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class A(Package):
"""Simple package with no dependencies"""
diff --git a/var/spack/repos/builtin.mock/packages/b/package.py b/var/spack/repos/builtin.mock/packages/b/package.py
index c447a56b48..5729f24e79 100644
--- a/var/spack/repos/builtin.mock/packages/b/package.py
+++ b/var/spack/repos/builtin.mock/packages/b/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class B(Package):
"""Simple package with no dependencies"""
diff --git a/var/spack/repos/builtin.mock/packages/c/package.py b/var/spack/repos/builtin.mock/packages/c/package.py
index 5b6079c4e3..80777a05bb 100644
--- a/var/spack/repos/builtin.mock/packages/c/package.py
+++ b/var/spack/repos/builtin.mock/packages/c/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class C(Package):
"""Simple package with no dependencies"""
diff --git a/var/spack/repos/builtin.mock/packages/callpath/package.py b/var/spack/repos/builtin.mock/packages/callpath/package.py
index c297a123b8..56b969df98 100644
--- a/var/spack/repos/builtin.mock/packages/callpath/package.py
+++ b/var/spack/repos/builtin.mock/packages/callpath/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Callpath(Package):
homepage = "https://github.com/tgamblin/callpath"
url = "http://github.com/tgamblin/callpath-1.0.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/cmake-client/package.py b/var/spack/repos/builtin.mock/packages/cmake-client/package.py
index 355689a2d2..51704e3f4b 100644
--- a/var/spack/repos/builtin.mock/packages/cmake-client/package.py
+++ b/var/spack/repos/builtin.mock/packages/cmake-client/package.py
@@ -25,30 +25,28 @@
from spack import *
import os
+
def check(condition, msg):
"""Raise an install error if condition is False."""
if not condition:
raise InstallError(msg)
-class CmakeClient(Package):
+class CmakeClient(CMakePackage):
"""A dumy package that uses cmake."""
homepage = 'https://www.example.com'
url = 'https://www.example.com/cmake-client-1.0.tar.gz'
version('1.0', '4cb3ff35b2472aae70f542116d616e63')
- depends_on('cmake')
-
-
def setup_environment(self, spack_env, run_env):
spack_cc # Ensure spack module-scope variable is available
check(from_cmake == "from_cmake",
"setup_environment couldn't read global set by cmake.")
check(self.spec['cmake'].link_arg == "test link arg",
- "link arg on dependency spec not readable from setup_environment.")
-
+ "link arg on dependency spec not readable from "
+ "setup_environment.")
def setup_dependent_environment(self, spack_env, run_env, dspec):
spack_cc # Ensure spack module-scope variable is available
@@ -56,8 +54,8 @@ class CmakeClient(Package):
"setup_dependent_environment couldn't read global set by cmake.")
check(self.spec['cmake'].link_arg == "test link arg",
- "link arg on dependency spec not readable from setup_dependent_environment.")
-
+ "link arg on dependency spec not readable from "
+ "setup_dependent_environment.")
def setup_dependent_package(self, module, dspec):
spack_cc # Ensure spack module-scope variable is available
@@ -65,9 +63,13 @@ class CmakeClient(Package):
"setup_dependent_package couldn't read global set by cmake.")
check(self.spec['cmake'].link_arg == "test link arg",
- "link arg on dependency spec not readable from setup_dependent_package.")
+ "link arg on dependency spec not readable from "
+ "setup_dependent_package.")
+ def cmake(self, spec, prefix):
+ pass
+ build = cmake
def install(self, spec, prefix):
# check that cmake is in the global scope.
diff --git a/var/spack/repos/builtin.mock/packages/cmake/package.py b/var/spack/repos/builtin.mock/packages/cmake/package.py
index 0356cf8afb..c8b6464e69 100644
--- a/var/spack/repos/builtin.mock/packages/cmake/package.py
+++ b/var/spack/repos/builtin.mock/packages/cmake/package.py
@@ -25,6 +25,7 @@
from spack import *
import os
+
def check(condition, msg):
"""Raise an install error if condition is False."""
if not condition:
@@ -39,7 +40,6 @@ class Cmake(Package):
version('3.4.3', '4cb3ff35b2472aae70f542116d616e63',
url='https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz')
-
def setup_environment(self, spack_env, run_env):
spack_cc # Ensure spack module-scope variable is available
spack_env.set('for_install', 'for_install')
@@ -48,7 +48,6 @@ class Cmake(Package):
spack_cc # Ensure spack module-scope variable is available
spack_env.set('from_cmake', 'from_cmake')
-
def setup_dependent_package(self, module, dspec):
spack_cc # Ensure spack module-scope variable is available
@@ -57,7 +56,6 @@ class Cmake(Package):
self.spec.link_arg = "test link arg"
-
def install(self, spec, prefix):
mkdirp(prefix.bin)
diff --git a/var/spack/repos/builtin.mock/packages/develop-test/package.py b/var/spack/repos/builtin.mock/packages/develop-test/package.py
new file mode 100644
index 0000000000..0c693c60fb
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/develop-test/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class DevelopTest(Package):
+ """Dummy package with develop version"""
+ homepage = "http://www.openblas.net"
+ url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+
+ version('develop', git='https://github.com/dummy/repo.git')
+ version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/direct_mpich/package.py b/var/spack/repos/builtin.mock/packages/direct-mpich/package.py
index 663908d56c..f38589ad4d 100644
--- a/var/spack/repos/builtin.mock/packages/direct_mpich/package.py
+++ b/var/spack/repos/builtin.mock/packages/direct-mpich/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class DirectMpich(Package):
homepage = "http://www.example.com"
url = "http://www.example.com/direct_mpich-1.0.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/dt-diamond-bottom/package.py b/var/spack/repos/builtin.mock/packages/dt-diamond-bottom/package.py
new file mode 100644
index 0000000000..0c9fc1164a
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dt-diamond-bottom/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class DtDiamondBottom(Package):
+ """This package has an indirect diamond dependency on dt-diamond-bottom"""
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dt-diamond-bottom-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dt-diamond-left/package.py b/var/spack/repos/builtin.mock/packages/dt-diamond-left/package.py
new file mode 100644
index 0000000000..40b65266d4
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dt-diamond-left/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class DtDiamondLeft(Package):
+ """This package has an indirect diamond dependency on dt-diamond-bottom"""
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dt-diamond-left-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dt-diamond-bottom', type='build')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dt-diamond-right/package.py b/var/spack/repos/builtin.mock/packages/dt-diamond-right/package.py
new file mode 100644
index 0000000000..7b6e4abe5f
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dt-diamond-right/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class DtDiamondRight(Package):
+ """This package has an indirect diamond dependency on dt-diamond-bottom"""
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dt-diamond-right-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dt-diamond-bottom', type=('build', 'link', 'run'))
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dt-diamond/package.py b/var/spack/repos/builtin.mock/packages/dt-diamond/package.py
new file mode 100644
index 0000000000..0b0f300b35
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dt-diamond/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class DtDiamond(Package):
+ """This package has an indirect diamond dependency on dt-diamond-bottom"""
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dt-diamond-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dt-diamond-left')
+ depends_on('dt-diamond-right')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtbuild1/package.py b/var/spack/repos/builtin.mock/packages/dtbuild1/package.py
new file mode 100644
index 0000000000..8d3b28b539
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtbuild1/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtbuild1(Package):
+ """Package for use as a build tool for deptypes testing which has its own
+ deptree"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtbuild1-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dtbuild2', type='build')
+ depends_on('dtlink2')
+ depends_on('dtrun2', type='run')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtbuild2/package.py b/var/spack/repos/builtin.mock/packages/dtbuild2/package.py
new file mode 100644
index 0000000000..9ea65735ff
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtbuild2/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtbuild2(Package):
+ """Simple package which acts as a build dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtbuild2-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtbuild3/package.py b/var/spack/repos/builtin.mock/packages/dtbuild3/package.py
new file mode 100644
index 0000000000..261c69e01e
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtbuild3/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtbuild3(Package):
+ """Simple package which acts as a build dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtbuild3-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtlink1/package.py b/var/spack/repos/builtin.mock/packages/dtlink1/package.py
new file mode 100644
index 0000000000..0269e08b65
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtlink1/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtlink1(Package):
+ """Simple package which acts as a link dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtlink1-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dtlink3')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtlink2/package.py b/var/spack/repos/builtin.mock/packages/dtlink2/package.py
new file mode 100644
index 0000000000..ad55c5ad48
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtlink2/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtlink2(Package):
+ """Simple package which acts as a link dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtlink2-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtlink3/package.py b/var/spack/repos/builtin.mock/packages/dtlink3/package.py
new file mode 100644
index 0000000000..2b425103bd
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtlink3/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtlink3(Package):
+ """Simple package which acts as a link dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtlink3-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dtbuild2', type='build')
+ depends_on('dtlink4')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtlink4/package.py b/var/spack/repos/builtin.mock/packages/dtlink4/package.py
new file mode 100644
index 0000000000..d7af5ecbfc
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtlink4/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtlink4(Package):
+ """Simple package which acts as a link dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtlink4-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtlink5/package.py b/var/spack/repos/builtin.mock/packages/dtlink5/package.py
new file mode 100644
index 0000000000..a9a22734cd
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtlink5/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtlink5(Package):
+ """Simple package which acts as a link dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtlink5-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtrun1/package.py b/var/spack/repos/builtin.mock/packages/dtrun1/package.py
new file mode 100644
index 0000000000..af9539ba68
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtrun1/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtrun1(Package):
+ """Simple package which acts as a run dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtrun1-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dtlink5')
+ depends_on('dtrun3', type='run')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtrun2/package.py b/var/spack/repos/builtin.mock/packages/dtrun2/package.py
new file mode 100644
index 0000000000..a6cf0110b3
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtrun2/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtrun2(Package):
+ """Simple package which acts as a run dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtrun2-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtrun3/package.py b/var/spack/repos/builtin.mock/packages/dtrun3/package.py
new file mode 100644
index 0000000000..426320c247
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtrun3/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtrun3(Package):
+ """Simple package which acts as a run dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtrun3-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dtbuild3', type='build')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dttop/package.py b/var/spack/repos/builtin.mock/packages/dttop/package.py
new file mode 100644
index 0000000000..99c86523e1
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dttop/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dttop(Package):
+ """Package with a complicated dependency tree"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dttop-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dtbuild1', type='build')
+ depends_on('dtlink1')
+ depends_on('dtrun1', type='run')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dtuse/package.py b/var/spack/repos/builtin.mock/packages/dtuse/package.py
new file mode 100644
index 0000000000..c77d700b98
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/dtuse/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dtuse(Package):
+ """Simple package which uses dttop"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/dtuse-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('dttop')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/dyninst/package.py b/var/spack/repos/builtin.mock/packages/dyninst/package.py
index ad486011e2..daf1b82ec6 100644
--- a/var/spack/repos/builtin.mock/packages/dyninst/package.py
+++ b/var/spack/repos/builtin.mock/packages/dyninst/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Dyninst(Package):
homepage = "https://paradyn.org"
url = "http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz"
diff --git a/var/spack/repos/builtin.mock/packages/e/package.py b/var/spack/repos/builtin.mock/packages/e/package.py
index b951a3eaa6..c764007563 100644
--- a/var/spack/repos/builtin.mock/packages/e/package.py
+++ b/var/spack/repos/builtin.mock/packages/e/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class E(Package):
"""Simple package with no dependencies"""
diff --git a/var/spack/repos/builtin.mock/packages/externalmodule/package.py b/var/spack/repos/builtin.mock/packages/externalmodule/package.py
new file mode 100644
index 0000000000..f7c9b056a4
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/externalmodule/package.py
@@ -0,0 +1,38 @@
+
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Externalmodule(Package):
+ homepage = "http://somewhere.com"
+ url = "http://somewhere.com/module-1.0.tar.gz"
+
+ version('1.0', '1234567890abcdef1234567890abcdef')
+
+ depends_on('externalprereq')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/externalprereq/package.py b/var/spack/repos/builtin.mock/packages/externalprereq/package.py
index bd3c4348bf..226742f2cb 100644
--- a/var/spack/repos/builtin.mock/packages/externalprereq/package.py
+++ b/var/spack/repos/builtin.mock/packages/externalprereq/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Externalprereq(Package):
homepage = "http://somewhere.com"
url = "http://somewhere.com/prereq-1.0.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/externaltest/package.py b/var/spack/repos/builtin.mock/packages/externaltest/package.py
index 2318887aec..252c42556e 100644
--- a/var/spack/repos/builtin.mock/packages/externaltest/package.py
+++ b/var/spack/repos/builtin.mock/packages/externaltest/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Externaltest(Package):
homepage = "http://somewhere.com"
url = "http://somewhere.com/test-1.0.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/externaltool/package.py b/var/spack/repos/builtin.mock/packages/externaltool/package.py
index 9ff2396f36..d2daddd350 100644
--- a/var/spack/repos/builtin.mock/packages/externaltool/package.py
+++ b/var/spack/repos/builtin.mock/packages/externaltool/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Externaltool(Package):
homepage = "http://somewhere.com"
url = "http://somewhere.com/tool-1.0.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/externalvirtual/package.py b/var/spack/repos/builtin.mock/packages/externalvirtual/package.py
index e19ef332f0..6310a17bc9 100644
--- a/var/spack/repos/builtin.mock/packages/externalvirtual/package.py
+++ b/var/spack/repos/builtin.mock/packages/externalvirtual/package.py
@@ -24,14 +24,17 @@
##############################################################################
from spack import *
+
class Externalvirtual(Package):
homepage = "http://somewhere.com"
url = "http://somewhere.com/stuff-1.0.tar.gz"
version('1.0', '1234567890abcdef1234567890abcdef')
version('2.0', '234567890abcdef1234567890abcdef1')
+ version('2.1', '34567890abcdef1234567890abcdef12')
+ version('2.2', '4567890abcdef1234567890abcdef123')
- provides('stuff')
+ provides('stuff', when='@1.0:')
def install(self, spec, prefix):
pass
diff --git a/var/spack/repos/builtin.mock/packages/failing-build/package.py b/var/spack/repos/builtin.mock/packages/failing-build/package.py
new file mode 100644
index 0000000000..a36553992e
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/failing-build/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FailingBuild(Package):
+ """This package has a trivial install method that fails."""
+
+ homepage = "http://www.example.com/trivial_install"
+ url = "http://www.unit-test-should-replace-this-url/trivial_install-1.0.tar.gz"
+
+ version('1.0', 'foobarbaz')
+
+ def install(self, spec, prefix):
+ raise InstallError("Expected failure.")
diff --git a/var/spack/repos/builtin.mock/packages/fake/package.py b/var/spack/repos/builtin.mock/packages/fake/package.py
index 15aabf1101..b83eec7470 100644
--- a/var/spack/repos/builtin.mock/packages/fake/package.py
+++ b/var/spack/repos/builtin.mock/packages/fake/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Fake(Package):
homepage = "http://www.fake-spack-example.org"
url = "http://www.fake-spack-example.org/downloads/fake-1.0.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/git-test/package.py b/var/spack/repos/builtin.mock/packages/git-test/package.py
index aeea41146f..730e71ac6b 100644
--- a/var/spack/repos/builtin.mock/packages/git-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/git-test/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class GitTest(Package):
"""Mock package that uses git for fetching."""
homepage = "http://www.git-fetch-example.com"
diff --git a/var/spack/repos/builtin.mock/packages/hg-test/package.py b/var/spack/repos/builtin.mock/packages/hg-test/package.py
index 64719eb53c..70a9b7f2c7 100644
--- a/var/spack/repos/builtin.mock/packages/hg-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/hg-test/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class HgTest(Package):
"""Test package that does fetching with mercurial."""
homepage = "http://www.hg-fetch-example.com"
diff --git a/var/spack/repos/builtin.mock/packages/hypre/package.py b/var/spack/repos/builtin.mock/packages/hypre/package.py
index 3aedea9bf2..b9e31b09dc 100644
--- a/var/spack/repos/builtin.mock/packages/hypre/package.py
+++ b/var/spack/repos/builtin.mock/packages/hypre/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Hypre(Package):
"""Hypre is included here as an example of a package that depends on
both LAPACK and BLAS."""
diff --git a/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py b/var/spack/repos/builtin.mock/packages/indirect-mpich/package.py
index 6ed779889b..77b8022b1c 100644
--- a/var/spack/repos/builtin.mock/packages/indirect_mpich/package.py
+++ b/var/spack/repos/builtin.mock/packages/indirect-mpich/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class IndirectMpich(Package):
"""Test case for a package that depends on MPI and one of its
dependencies requires a *particular version* of MPI.
@@ -35,7 +36,7 @@ class IndirectMpich(Package):
version(1.0, 'foobarbaz')
depends_on('mpi')
- depends_on('direct_mpich')
+ depends_on('direct-mpich')
def install(self, spec, prefix):
pass
diff --git a/var/spack/repos/builtin.mock/packages/libdwarf/package.py b/var/spack/repos/builtin.mock/packages/libdwarf/package.py
index b53e295e23..0fcbe4a62e 100644
--- a/var/spack/repos/builtin.mock/packages/libdwarf/package.py
+++ b/var/spack/repos/builtin.mock/packages/libdwarf/package.py
@@ -23,11 +23,11 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
# Only build certain parts of dwarf because the other ones break.
dwarf_dirs = ['libdwarf', 'dwarfdump2']
+
class Libdwarf(Package):
homepage = "http://www.prevanders.net/dwarf.html"
url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/libelf/package.py b/var/spack/repos/builtin.mock/packages/libelf/package.py
index f52d8cefe1..90d00ad339 100644
--- a/var/spack/repos/builtin.mock/packages/libelf/package.py
+++ b/var/spack/repos/builtin.mock/packages/libelf/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libelf(Package):
homepage = "http://www.mr511.de/software/english.html"
url = "http://www.mr511.de/software/libelf-0.8.13.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/mpich/package.py b/var/spack/repos/builtin.mock/packages/mpich/package.py
index f278f26b8b..936127398c 100644
--- a/var/spack/repos/builtin.mock/packages/mpich/package.py
+++ b/var/spack/repos/builtin.mock/packages/mpich/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Mpich(Package):
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/mpich2/package.py b/var/spack/repos/builtin.mock/packages/mpich2/package.py
index e6b68d2490..c92b4ba43a 100644
--- a/var/spack/repos/builtin.mock/packages/mpich2/package.py
+++ b/var/spack/repos/builtin.mock/packages/mpich2/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Mpich2(Package):
homepage = "http://www.mpich.org"
url = "http://www.mpich.org/static/downloads/1.5/mpich2-1.5.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/mpileaks/package.py b/var/spack/repos/builtin.mock/packages/mpileaks/package.py
index bc26f539ba..10fbf3845e 100644
--- a/var/spack/repos/builtin.mock/packages/mpileaks/package.py
+++ b/var/spack/repos/builtin.mock/packages/mpileaks/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Mpileaks(Package):
homepage = "http://www.llnl.gov"
url = "http://www.llnl.gov/mpileaks-1.0.tar.gz"
@@ -35,6 +36,8 @@ class Mpileaks(Package):
variant('debug', default=False, description='Debug variant')
variant('opt', default=False, description='Optimized variant')
+ variant('shared', default=True, description='Build shared library')
+ variant('static', default=True, description='Build static library')
depends_on("mpi")
depends_on("callpath")
diff --git a/var/spack/repos/builtin.mock/packages/multi-provider-mpi/package.py b/var/spack/repos/builtin.mock/packages/multi-provider-mpi/package.py
new file mode 100644
index 0000000000..5f85dec9b5
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/multi-provider-mpi/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class MultiProviderMpi(Package):
+ """This is a fake MPI package used to test packages providing multiple
+ virtuals at the same version."""
+ homepage = "http://www.spack-fake-mpi.org"
+ url = "http://www.spack-fake-mpi.org/downloads/multi-mpi-1.0.tar.gz"
+
+ version('2.0.0', 'foobarbaz')
+ version('1.10.3', 'foobarbaz')
+ version('1.10.2', 'foobarbaz')
+ version('1.10.1', 'foobarbaz')
+ version('1.10.0', 'foobarbaz')
+ version('1.8.8', 'foobarbaz')
+ version('1.6.5', 'foobarbaz')
+
+ provides('mpi@3.1', when='@2.0.0')
+ provides('mpi@3.0', when='@1.10.3')
+ provides('mpi@3.0', when='@1.10.2')
+ provides('mpi@3.0', when='@1.10.1')
+ provides('mpi@3.0', when='@1.10.0')
+ provides('mpi@3.0', when='@1.8.8')
+ provides('mpi@2.2', when='@1.6.5')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/multimethod/package.py b/var/spack/repos/builtin.mock/packages/multimethod/package.py
index 2d15722470..fa3f815135 100644
--- a/var/spack/repos/builtin.mock/packages/multimethod/package.py
+++ b/var/spack/repos/builtin.mock/packages/multimethod/package.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import spack.architecture
class Multimethod(Package):
@@ -49,7 +50,6 @@ class Multimethod(Package):
def no_version_2(self):
return 4
-
#
# These functions overlap, so there is ambiguity, but we'll take
# the first one.
@@ -62,7 +62,6 @@ class Multimethod(Package):
def version_overlap(self):
return 2
-
#
# More complicated case with cascading versions.
#
@@ -81,7 +80,6 @@ class Multimethod(Package):
def mpi_version(self):
return 1
-
#
# Use these to test whether the default method is called when no
# match is found. This also tests whether we can switch methods
@@ -98,31 +96,25 @@ class Multimethod(Package):
def has_a_default(self):
return 'intel'
-
-
#
- # Make sure we can switch methods on different architectures
+ # Make sure we can switch methods on different target
#
- @when('=x86_64')
- def different_by_architecture(self):
- return 'x86_64'
-
- @when('=ppc64')
- def different_by_architecture(self):
- return 'ppc64'
-
- @when('=ppc32')
- def different_by_architecture(self):
- return 'ppc32'
-
- @when('=arm64')
- def different_by_architecture(self):
- return 'arm64'
-
-
+ platform = spack.architecture.platform()
+ targets = platform.targets.values()
+ if len(targets) > 1:
+ targets = targets[:-1]
+
+ for target in targets:
+ @when('target=' + target.name)
+ def different_by_target(self):
+ if isinstance(self.spec.architecture.target, basestring):
+ return self.spec.architecture.target
+ else:
+ return self.spec.architecture.target.name
#
# Make sure we can switch methods on different dependencies
#
+
@when('^mpich')
def different_by_dep(self):
return 'mpich'
@@ -131,7 +123,6 @@ class Multimethod(Package):
def different_by_dep(self):
return 'zmpi'
-
#
# Make sure we can switch on virtual dependencies
#
diff --git a/var/spack/repos/builtin.mock/packages/netlib-blas/package.py b/var/spack/repos/builtin.mock/packages/netlib-blas/package.py
index 9d567f2e9b..0a5b1d0e6a 100644
--- a/var/spack/repos/builtin.mock/packages/netlib-blas/package.py
+++ b/var/spack/repos/builtin.mock/packages/netlib-blas/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class NetlibBlas(Package):
homepage = "http://www.netlib.org/lapack/"
url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
diff --git a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py
index 46d6ae43dc..755d3001a4 100644
--- a/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py
+++ b/var/spack/repos/builtin.mock/packages/netlib-lapack/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class NetlibLapack(Package):
homepage = "http://www.netlib.org/lapack/"
url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
diff --git a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py
index b36237c1e2..0f14fbaa61 100644
--- a/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py
+++ b/var/spack/repos/builtin.mock/packages/openblas-with-lapack/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class OpenblasWithLapack(Package):
"""Dummy version of OpenBLAS that also provides LAPACK, for testing."""
homepage = "http://www.openblas.net"
diff --git a/var/spack/repos/builtin.mock/packages/openblas/package.py b/var/spack/repos/builtin.mock/packages/openblas/package.py
index 5b39447e83..f6cdeeea49 100644
--- a/var/spack/repos/builtin.mock/packages/openblas/package.py
+++ b/var/spack/repos/builtin.mock/packages/openblas/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Openblas(Package):
"""OpenBLAS: An optimized BLAS library"""
homepage = "http://www.openblas.net"
diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py
index f97959c763..337f54e24e 100644
--- a/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py
+++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-2/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class OptionalDepTest2(Package):
"""Depends on the optional-dep-test package"""
diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py
index d8fe33c3da..2904b3782d 100644
--- a/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py
+++ b/var/spack/repos/builtin.mock/packages/optional-dep-test-3/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class OptionalDepTest3(Package):
"""Depends on the optional-dep-test package"""
diff --git a/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py
index 80c1da55f8..2c07e61769 100644
--- a/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/optional-dep-test/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class OptionalDepTest(Package):
"""Description"""
diff --git a/var/spack/repos/builtin.mock/packages/othervirtual/package.py b/var/spack/repos/builtin.mock/packages/othervirtual/package.py
new file mode 100644
index 0000000000..83bc07df98
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/othervirtual/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Othervirtual(Package):
+ homepage = "http://somewhere.com"
+ url = "http://somewhere.com/stuff-1.0.tar.gz"
+
+ version('1.0', '67890abcdef1234567890abcdef12345')
+
+ provides('stuff')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/python/package.py b/var/spack/repos/builtin.mock/packages/python/package.py
index dc21b475e5..a5290161ad 100644
--- a/var/spack/repos/builtin.mock/packages/python/package.py
+++ b/var/spack/repos/builtin.mock/packages/python/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Python(Package):
"""Dummy Python package to demonstrate preferred versions."""
homepage = "http://www.python.org"
@@ -40,4 +41,3 @@ class Python(Package):
def install(self, spec, prefix):
pass
-
diff --git a/var/spack/repos/builtin.mock/packages/simple-inheritance/package.py b/var/spack/repos/builtin.mock/packages/simple-inheritance/package.py
new file mode 100644
index 0000000000..3f135b002e
--- /dev/null
+++ b/var/spack/repos/builtin.mock/packages/simple-inheritance/package.py
@@ -0,0 +1,24 @@
+from spack import *
+
+
+class BaseWithDirectives(Package):
+
+ depends_on('cmake', type='build')
+ depends_on('mpi')
+ variant('openblas', description='Activates openblas', default=True)
+ provides('service1')
+
+
+class SimpleInheritance(BaseWithDirectives):
+ """Simple package which acts as a build dependency"""
+
+ homepage = "http://www.example.com"
+ url = "http://www.example.com/simple-1.0.tar.gz"
+
+ version('1.0', '0123456789abcdef0123456789abcdef')
+
+ depends_on('openblas', when='+openblas')
+ provides('lapack', when='+openblas')
+
+ def install(self, spec, prefix):
+ pass
diff --git a/var/spack/repos/builtin.mock/packages/svn-test/package.py b/var/spack/repos/builtin.mock/packages/svn-test/package.py
index 2f197593e0..01d0929c28 100644
--- a/var/spack/repos/builtin.mock/packages/svn-test/package.py
+++ b/var/spack/repos/builtin.mock/packages/svn-test/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class SvnTest(Package):
"""Mock package that uses svn for fetching."""
url = "http://www.example.com/svn-test-1.0.tar.gz"
diff --git a/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py b/var/spack/repos/builtin.mock/packages/trivial-install-test-package/package.py
index 7c65909ad2..2129d9788b 100644
--- a/var/spack/repos/builtin.mock/packages/trivial_install_test_package/package.py
+++ b/var/spack/repos/builtin.mock/packages/trivial-install-test-package/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class TrivialInstallTestPackage(Package):
"""This package is a stub with a trivial install method. It allows us
to test the install and uninstall logic of spack."""
diff --git a/var/spack/repos/builtin.mock/packages/zmpi/package.py b/var/spack/repos/builtin.mock/packages/zmpi/package.py
index fcd3afe93b..b6a5b33011 100644
--- a/var/spack/repos/builtin.mock/packages/zmpi/package.py
+++ b/var/spack/repos/builtin.mock/packages/zmpi/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Zmpi(Package):
"""This is a fake MPI package used to demonstrate virtual package providers
with dependencies."""
diff --git a/var/spack/repos/builtin/packages/ImageMagick/package.py b/var/spack/repos/builtin/packages/ImageMagick/package.py
deleted file mode 100644
index f8173169e1..0000000000
--- a/var/spack/repos/builtin/packages/ImageMagick/package.py
+++ /dev/null
@@ -1,63 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-from spack import *
-
-
-class Imagemagick(Package):
- """ImageMagick is a image processing library"""
- homepage = "http://www.imagemagic.org"
-
- # -------------------------------------------------------------------------
- # ImageMagick does not keep around anything but *-10 versions, so
- # this URL may change. If you want the bleeding edge, you can
- # uncomment it and see if it works but you may need to try to
- # fetch a newer version (-6, -7, -8, -9, etc.) or you can stick
- # wtih the older, stable, archived -10 versions below.
- #
- # TODO: would be nice if spack had a way to recommend avoiding a
- # TODO: bleeding edge version, but not comment it out.
- # -------------------------------------------------------------------------
- # version('6.9.0-6', 'c1bce7396c22995b8bdb56b7797b4a1b',
- # url="http://www.imagemagick.org/download/ImageMagick-6.9.0-6.tar.bz2")
-
- # -------------------------------------------------------------------------
- # *-10 versions are archived, so these versions should fetch reliably.
- # -------------------------------------------------------------------------
- version(
- '6.8.9-10',
- 'aa050bf9785e571c956c111377bbf57c',
- url="http://sourceforge.net/projects/imagemagick/files/old-sources/6.x/6.8/ImageMagick-6.8.9-10.tar.gz/download")
-
- depends_on('jpeg')
- depends_on('libtool')
- depends_on('libpng')
- depends_on('freetype')
- depends_on('fontconfig')
- depends_on('libtiff')
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/R/package.py b/var/spack/repos/builtin/packages/R/package.py
deleted file mode 100644
index 0177fe1a2b..0000000000
--- a/var/spack/repos/builtin/packages/R/package.py
+++ /dev/null
@@ -1,130 +0,0 @@
-##############################################################################
-# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
-# Produced at the Lawrence Livermore National Laboratory.
-#
-# This file is part of Spack.
-# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
-# LLNL-CODE-647188
-#
-# For details, see https://github.com/llnl/spack
-# Please also see the LICENSE file for our notice and the LGPL.
-#
-# This program is free software; you can redistribute it and/or modify
-# it under the terms of the GNU Lesser General Public License (as
-# published by the Free Software Foundation) version 2.1, February 1999.
-#
-# This program is distributed in the hope that it will be useful, but
-# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
-# conditions of the GNU Lesser General Public License for more details.
-#
-# You should have received a copy of the GNU Lesser General Public
-# License along with this program; if not, write to the Free Software
-# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-##############################################################################
-import functools
-import glob
-import inspect
-import os
-import re
-from contextlib import closing
-
-import spack
-from llnl.util.lang import match_predicate
-from spack import *
-from spack.util.environment import *
-
-
-class R(Package):
- """
- R is 'GNU S', a freely available language and environment for statistical computing and graphics which provides a
- wide variety of statistical and graphical techniques: linear and nonlinear modelling, statistical tests, time series
- analysis, classification, clustering, etc. Please consult the R project homepage for further information.
- """
- homepage = "https://www.r-project.org"
- url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"
-
- extendable = True
-
- version('3.2.3', '1ba3dac113efab69e706902810cc2970')
- version('3.2.2', '57cef5c2e210a5454da1979562a10e5b')
- version('3.2.1', 'c2aac8b40f84e08e7f8c9068de9239a3')
- version('3.2.0', '66fa17ad457d7e618191aa0f52fc402e')
- version('3.1.3', '53a85b884925aa6b5811dfc361d73fc4')
- version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74')
-
- variant('external-lapack', default=False, description='Links to externally installed BLAS/LAPACK')
-
- # Virtual dependencies
- depends_on('blas', when='+external-lapack')
- depends_on('lapack', when='+external-lapack')
-
- # Concrete dependencies
- depends_on('readline')
- depends_on('ncurses')
- depends_on('icu')
- depends_on('glib')
- depends_on('zlib')
- depends_on('libtiff')
- depends_on('jpeg')
- depends_on('cairo')
- depends_on('pango')
- depends_on('freetype')
- depends_on('tcl')
- depends_on('tk')
-
- def install(self, spec, prefix):
- rlibdir = join_path(prefix, 'rlib')
- options = ['--prefix=%s' % prefix,
- '--libdir=%s' % rlibdir,
- '--enable-R-shlib',
- '--enable-BLAS-shlib',
- '--enable-R-framework=no']
- if '+external-lapack' in spec:
- options.extend(['--with-blas', '--with-lapack'])
-
- configure(*options)
- make()
- make('install')
-
- # ========================================================================
- # Set up environment to make install easy for R extensions.
- # ========================================================================
-
- @property
- def r_lib_dir(self):
- return os.path.join('rlib', 'R', 'library')
-
- def setup_dependent_environment(self, spack_env, run_env, extension_spec):
- # Set R_LIBS to include the library dir for the
- # extension and any other R extensions it depends on.
- r_libs_path = []
- for d in extension_spec.traverse():
- if d.package.extends(self.spec):
- r_libs_path.append(os.path.join(d.prefix, self.r_lib_dir))
-
- r_libs_path = ':'.join(r_libs_path)
- spack_env.set('R_LIBS', r_libs_path)
-
- # For run time environment set only the path for extension_spec and prepend it to R_LIBS
- if extension_spec.package.extends(self.spec):
- run_env.prepend_path('R_LIBS', os.path.join(extension_spec.prefix, self.r_lib_dir))
-
-
- def setup_dependent_package(self, module, ext_spec):
- """
- Called before R modules' install() methods.
-
- In most cases, extensions will only need to have one line::
-
- R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
- """
- # R extension builds can have a global R executable function
- module.R = Executable(join_path(self.spec.prefix.bin, 'R'))
-
- # Add variable for library directry
- module.r_lib_dir = os.path.join(ext_spec.prefix, self.r_lib_dir)
-
- # Make the site packages directory for extensions, if it does not exist already.
- if ext_spec.package.is_extension:
- mkdirp(module.r_lib_dir)
diff --git a/var/spack/repos/builtin/packages/abinit/package.py b/var/spack/repos/builtin/packages/abinit/package.py
new file mode 100644
index 0000000000..76fa044982
--- /dev/null
+++ b/var/spack/repos/builtin/packages/abinit/package.py
@@ -0,0 +1,175 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# Author: Matteo Giantomassi <matteo.giantomassiNOSPAM AT uclouvain.be>
+# Date: October 11, 2016
+
+from spack import *
+
+
+class Abinit(Package):
+ """ABINIT is a package whose main program allows one to find the total
+ energy, charge density and electronic structure of systems made of
+ electrons and nuclei (molecules and periodic solids) within
+ Density Functional Theory (DFT), using pseudopotentials and a planewave
+ or wavelet basis. ABINIT also includes options to optimize the geometry
+ according to the DFT forces and stresses, or to perform molecular dynamics
+ simulations using these forces, or to generate dynamical matrices,
+ Born effective charges, and dielectric tensors, based on Density-Functional
+ Perturbation Theory, and many more properties. Excited states can be
+ computed within the Many-Body Perturbation Theory (the GW approximation and
+ the Bethe-Salpeter equation), and Time-Dependent Density Functional Theory
+ (for molecules). In addition to the main ABINIT code, different utility
+ programs are provided.
+ """
+
+ homepage = "http://www.abinit.org"
+ url = "http://ftp.abinit.org/abinit-8.0.8b.tar.gz"
+
+ # Versions before 8.0.8b are not supported.
+ version("8.0.8b", "abc9e303bfa7f9f43f95598f87d84d5d")
+
+ variant('mpi', default=True,
+ description='Builds with MPI support. Requires MPI2+')
+ variant('openmp', default=False,
+ description='Enables OpenMP threads. Use threaded FFTW3')
+ variant('scalapack', default=False,
+ description='Enables scalapack support. Requires MPI')
+ # variant('elpa', default=False,
+ # description='Uses elpa instead of scalapack. Requires MPI')
+
+ # TODO: To be tested.
+ # It was working before the last `git pull` but now all tests crash.
+ # For the time being, the default is netcdf3 and the internal fallbacks
+ variant('hdf5', default=False,
+ description='Enables HDF5+Netcdf4 with MPI. WARNING: experimental')
+
+ # Add dependencies
+ # currently one cannot forward options to virtual packages, see #1712.
+ # depends_on("blas", when="~openmp")
+ # depends_on("blas+openmp", when="+openmp")
+ depends_on('blas')
+ depends_on("lapack")
+
+ # Require MPI2+
+ depends_on("mpi@2:", when="+mpi")
+
+ depends_on("scalapack", when="+scalapack+mpi")
+ # depends_on("elpa", when="+elpa+mpi~openmp")
+ # depends_on("elpa+openmp", when="+elpa+mpi+openmp")
+
+ depends_on("fftw+float", when="~openmp")
+ depends_on("fftw+float+openmp", when="+openmp")
+
+ depends_on("netcdf-fortran", when="+hdf5")
+ depends_on("hdf5+mpi", when='+mpi+hdf5') # required for NetCDF-4 support
+
+ # pin libxc version
+ depends_on("libxc@2.2.1")
+
+ def validate(self, spec):
+ """
+ Checks if incompatible variants have been activated at the same time
+
+ :param spec: spec of the package
+ :raises RuntimeError: in case of inconsistencies
+ """
+ error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active'
+
+ if '+scalapack' in spec and '~mpi' in spec:
+ raise RuntimeError(error.format(variant='scalapack'))
+
+ if '+elpa' in spec and ('~mpi' in spec or '~scalapack' in spec):
+ raise RuntimeError(error.format(variant='elpa'))
+
+ def install(self, spec, prefix):
+ self.validate(spec)
+
+ options = ['--prefix=%s' % prefix]
+ oapp = options.append
+
+ if '+mpi' in spec:
+ # MPI version:
+ # let the configure script auto-detect MPI support from mpi_prefix
+ oapp("--with-mpi-prefix=%s" % spec["mpi"].prefix)
+ oapp("--enable-mpi=yes")
+ oapp("--enable-mpi-io=yes")
+
+ # Activate OpenMP in Abinit Fortran code.
+ if '+openmp' in spec:
+ oapp('--enable-openmp=yes')
+
+ # BLAS/LAPACK
+ if '+scalapack' in spec:
+ oapp("--with-linalg-flavor=custom+scalapack")
+ linalg = (spec['scalapack'].scalapack_libs +
+ spec['lapack'].lapack_libs + spec['blas'].blas_libs)
+
+ # elif '+elpa' in spec:
+ else:
+ oapp("--with-linalg-flavor=custom")
+ linalg = spec['lapack'].lapack_libs + spec['blas'].blas_libs
+
+ oapp("--with-linalg-libs=%s" % linalg.ld_flags)
+
+ # FFTW3: use sequential or threaded version if +openmp
+ fftflavor, fftlibs = "fftw3", "-lfftw3 -lfftw3f"
+ if '+openmp' in spec:
+ fftflavor = "fftw3-threads"
+ fftlibs = "-lfftw3_omp -lfftw3 -lfftw3f"
+
+ options.extend([
+ "--with-fft-flavor=%s" % fftflavor,
+ "--with-fft-incs=-I%s" % spec["fftw"].prefix.include,
+ "--with-fft-libs=-L%s %s" % (spec["fftw"].prefix.lib, fftlibs),
+ ])
+ oapp("--with-dft-flavor=atompaw+libxc")
+
+ # LibXC library
+ options.extend([
+ "with_libxc_incs=-I%s" % spec["libxc"].prefix.include,
+ "with_libxc_libs=-L%s -lxcf90 -lxc" % spec["libxc"].prefix.lib,
+ ])
+
+ # Netcdf4/HDF5
+ if "+hdf5" in spec:
+ oapp("--with-trio-flavor=netcdf")
+ hdf_libs = "-L%s -lhdf5_hl -lhdf5" % spec["hdf5"].prefix.lib
+ options.extend([
+ "--with-netcdf-incs=-I%s" % (
+ spec["netcdf-fortran"].prefix.include),
+ "--with-netcdf-libs=-L%s -lnetcdff -lnetcdf %s" % (
+ spec["netcdf-fortran"].prefix.lib, hdf_libs),
+ ])
+ else:
+ # Use internal fallbacks (netcdf3)
+ oapp("--with-trio-flavor=netcdf-fallback")
+
+ configure(*options)
+ make()
+
+ # make("check")
+ # make("tests_in")
+ make("install")
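The Abinit recipe above assembles its configure line by testing variants on the concrete spec and by rendering Spack's LibraryList objects as linker flags. The following is a minimal, hedged sketch of that idiom, not part of the commit; the package name MiniLinalgApp, its URL and checksum are hypothetical placeholders.

    from spack import *


    class MiniLinalgApp(Package):
        """Hypothetical package sketching variant checks and linalg flags."""

        homepage = "http://www.example.com"
        url = "http://www.example.com/mini-linalg-app-1.0.tar.gz"

        version('1.0', '0123456789abcdef0123456789abcdef')

        variant('mpi', default=True, description='Build with MPI support')

        depends_on('blas')
        depends_on('lapack')
        depends_on('mpi', when='+mpi')

        def install(self, spec, prefix):
            options = ['--prefix=%s' % prefix]

            # Variant state is queried on the concrete spec.
            if '+mpi' in spec:
                options.append('--enable-mpi=yes')

            # lapack_libs/blas_libs are LibraryList objects; adding them
            # concatenates the lists, and ld_flags renders the result as
            # "-L<dir> ... -l<name> ..." for the configure script.
            linalg = spec['lapack'].lapack_libs + spec['blas'].blas_libs
            options.append('--with-linalg-libs=%s' % linalg.ld_flags)

            configure(*options)
            make()
            make('install')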
diff --git a/var/spack/repos/builtin/packages/ack/package.py b/var/spack/repos/builtin/packages/ack/package.py
new file mode 100644
index 0000000000..70249aebac
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ack/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ack(Package):
+ """ack 2.14 is a tool like grep, optimized for programmers.
+
+ Designed for programmers with large heterogeneous trees of
+ source code, ack is written purely in portable Perl 5 and takes
+ advantage of the power of Perl's regular expressions."""
+
+ homepage = "http://beyondgrep.com/"
+ url = "http://beyondgrep.com/ack-2.14-single-file"
+
+ version('2.14', 'e74150a1609d28a70b450ef9cc2ed56b', expand=False)
+
+ depends_on('perl')
+
+ def install(self, spec, prefix):
+ mkdirp(prefix.bin)
+ ack = 'ack-{0}-single-file'.format(self.version)
+
+ # rewrite the script's #! line to call the perl dependency
+ shbang = '#!' + join_path(spec['perl'].prefix.bin, 'perl')
+ filter_file(r'^#!/usr/bin/env perl', shbang, ack)
+
+ install(ack, join_path(prefix.bin, "ack"))
+ set_executable(join_path(prefix.bin, "ack"))
diff --git a/var/spack/repos/builtin/packages/activeharmony/package.py b/var/spack/repos/builtin/packages/activeharmony/package.py
index 9d15bd71d9..6a4e67a1ca 100644
--- a/var/spack/repos/builtin/packages/activeharmony/package.py
+++ b/var/spack/repos/builtin/packages/activeharmony/package.py
@@ -24,8 +24,10 @@
##############################################################################
from spack import *
+
class Activeharmony(Package):
- """Active Harmony: a framework for auto-tuning (the automated search for values to improve the performance of a target application)."""
+ """Active Harmony: a framework for auto-tuning (the automated search for
+ values to improve the performance of a target application)."""
homepage = "http://www.dyninst.org/harmony"
url = "http://www.dyninst.org/sites/default/files/downloads/harmony/ah-4.5.tar.gz"
@@ -34,6 +36,3 @@ class Activeharmony(Package):
def install(self, spec, prefix):
make("CFLAGS=-O3")
make("install", 'PREFIX=%s' % prefix)
-
-from spack import *
-
diff --git a/var/spack/repos/builtin/packages/adept-utils/package.py b/var/spack/repos/builtin/packages/adept-utils/package.py
index 7b6c3702af..1a6998fd96 100644
--- a/var/spack/repos/builtin/packages/adept-utils/package.py
+++ b/var/spack/repos/builtin/packages/adept-utils/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class AdeptUtils(Package):
"""Utility libraries for LLNL performance tools."""
@@ -35,6 +36,7 @@ class AdeptUtils(Package):
depends_on("boost")
depends_on("mpi")
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(*std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/adios/adios_1100.patch b/var/spack/repos/builtin/packages/adios/adios_1100.patch
new file mode 100644
index 0000000000..7a9f857c32
--- /dev/null
+++ b/var/spack/repos/builtin/packages/adios/adios_1100.patch
@@ -0,0 +1,29 @@
+From 3b21a8a4150962c6938baeceacd04f619cea2fbc Mon Sep 17 00:00:00 2001
+From: Norbert Podhorszki <pnorbert@ornl.gov>
+Date: Thu, 1 Sep 2016 16:26:23 -0400
+Subject: [PATCH] ifdef around 'bool' type. hdf5 1.10 defines bool and breaks
+ compiling bp2h5.c
+
+---
+ utils/bp2h5/bp2h5.c | 8 +++++---
+ 1 file changed, 5 insertions(+), 3 deletions(-)
+
+diff --git a/utils/bp2h5/bp2h5.c b/utils/bp2h5/bp2h5.c
+index 9c500c7..fa746bd 100644
+--- a/utils/bp2h5/bp2h5.c
++++ b/utils/bp2h5/bp2h5.c
+@@ -43,9 +43,11 @@
+ #include "dmalloc.h"
+ #endif
+
+-typedef int bool;
+-#define false 0
+-#define true 1
++#ifndef bool
++ typedef int bool;
++# define false 0
++# define true 1
++#endif
+
+ bool noindex = false; // do no print array indices with data
+ bool printByteAsChar = false; // print 8 bit integer arrays as string
diff --git a/var/spack/repos/builtin/packages/adios/package.py b/var/spack/repos/builtin/packages/adios/package.py
new file mode 100644
index 0000000000..e240ce0858
--- /dev/null
+++ b/var/spack/repos/builtin/packages/adios/package.py
@@ -0,0 +1,131 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Adios(Package):
+ """The Adaptable IO System (ADIOS) provides a simple,
+ flexible way for scientists to describe the
+ data in their code that may need to be written,
+ read, or processed outside of the running simulation.
+ """
+
+ homepage = "http://www.olcf.ornl.gov/center-projects/adios/"
+ url = "https://github.com/ornladios/ADIOS/archive/v1.10.0.tar.gz"
+
+ version('develop', git='https://github.com/ornladios/ADIOS.git',
+ branch='master')
+ version('1.10.0', 'eff450a4c0130479417cfd63186957f3')
+ version('1.9.0', '310ff02388bbaa2b1c1710ee970b5678')
+
+ variant('shared', default=True,
+ description='Builds a shared version of the library')
+
+ variant('fortran', default=False,
+ description='Enable Fortran bindings support')
+
+ variant('mpi', default=True, description='Enable MPI support')
+ variant('infiniband', default=False, description='Enable infiniband support')
+
+ # transforms
+ variant('zlib', default=True, description='Enable zlib transform support')
+ variant('szip', default=False, description='Enable szip transform support')
+ # transports and serial file converters
+ variant('hdf5', default=False, description='Enable parallel HDF5 transport and serial bp2h5 converter')
+
+ # Lots of setting up here for this package
+ # module swap PrgEnv-intel PrgEnv-$COMP
+ # module load cray-hdf5/1.8.14
+ # module load python/2.7.10
+
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('python', type='build')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('mxml@2.9:')
+ # optional transformations
+ depends_on('zlib', when='+zlib')
+ depends_on('szip', when='+szip')
+ # optional transports & file converters
+ depends_on('hdf5@1.8:+mpi', when='+hdf5')
+
+ # Fix ADIOS <=1.10.0 compile error on HDF5 1.10+
+ # https://github.com/ornladios/ADIOS/commit/3b21a8a41509
+ # https://github.com/LLNL/spack/issues/1683
+ patch('adios_1100.patch', when='@:1.10.0^hdf5@1.10:')
+
+ def validate(self, spec):
+ """
+ Checks if incompatible variants have been activated at the same time
+ :param spec: spec of the package
+ :raises RuntimeError: in case of inconsistencies
+ """
+ if '+fortran' in spec and not self.compiler.fc:
+ msg = 'cannot build a fortran variant without a fortran compiler'
+ raise RuntimeError(msg)
+
+ def install(self, spec, prefix):
+ self.validate(spec)
+ # Handle compilation after spec validation
+ extra_args = []
+
+ # required, otherwise building ADIOS's Python bindings will fail
+ extra_args.append("CFLAGS=-fPIC")
+
+ # always build external MXML, even in ADIOS 1.10.0+
+ extra_args.append('--with-mxml=%s' % spec['mxml'].prefix)
+
+ if '+shared' in spec:
+ extra_args.append('--enable-shared')
+
+ if '+mpi' in spec:
+ extra_args.append('--with-mpi')
+ if '+infiniband' in spec:
+ extra_args.append('--with-infiniband')
+ else:
+ extra_args.append('--with-infiniband=no')
+
+ if '+fortran' in spec:
+ extra_args.append('--enable-fortran')
+ else:
+ extra_args.append('--disable-fortran')
+
+ if '+zlib' in spec:
+ extra_args.append('--with-zlib=%s' % spec['zlib'].prefix)
+ if '+szip' in spec:
+ extra_args.append('--with-szip=%s' % spec['szip'].prefix)
+ if '+hdf5' in spec:
+ extra_args.append('--with-phdf5=%s' % spec['hdf5'].prefix)
+
+ sh = which('sh')
+ sh('./autogen.sh')
+
+ configure("--prefix=%s" % prefix,
+ *extra_args)
+ make()
+ make("install")
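The patch directive in the ADIOS recipe combines a version range on the package itself with a constraint on one of its dependencies. A hedged illustration of that spec syntax in isolation follows; the package, patch file and checksums below are hypothetical and only mirror the pattern used above.

    from spack import *


    class Demo(Package):
        """Hypothetical package sketching a conditional patch directive."""

        homepage = "http://www.example.com"
        url = "http://www.example.com/demo-1.2.tar.gz"

        version('1.2', 'abcdef0123456789abcdef0123456789')
        version('1.1', '123456789abcdef0123456789abcdef0')

        depends_on('hdf5')

        # '@:1.2' restricts the patch to demo 1.2 and older, while
        # '^hdf5@1.10:' further restricts it to builds whose hdf5
        # dependency is at version 1.10 or newer.
        patch('fix_bool_typedef.patch', when='@:1.2^hdf5@1.10:')

        def install(self, spec, prefix):
            configure('--prefix=%s' % prefix)
            make()
            make('install')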
diff --git a/var/spack/repos/builtin/packages/adol-c/openmp_exam.patch b/var/spack/repos/builtin/packages/adol-c/openmp_exam_261.patch
index 8e21c72d92..8e21c72d92 100644
--- a/var/spack/repos/builtin/packages/adol-c/openmp_exam.patch
+++ b/var/spack/repos/builtin/packages/adol-c/openmp_exam_261.patch
diff --git a/var/spack/repos/builtin/packages/adol-c/package.py b/var/spack/repos/builtin/packages/adol-c/package.py
index a6052ad7bd..5c8d894757 100644
--- a/var/spack/repos/builtin/packages/adol-c/package.py
+++ b/var/spack/repos/builtin/packages/adol-c/package.py
@@ -23,22 +23,26 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import sys
+
class AdolC(Package):
- """A package for the automatic differentiation of first and higher derivatives of vector functions in C and C++ programs by operator overloading."""
+ """A package for the automatic differentiation of first and higher
+ derivatives of vector functions in C and C++ programs by operator
+ overloading."""
homepage = "https://projects.coin-or.org/ADOL-C"
url = "http://www.coin-or.org/download/source/ADOL-C/ADOL-C-2.6.1.tgz"
- version('head', svn='https://projects.coin-or.org/svn/ADOL-C/trunk/')
+ version('develop', svn='https://projects.coin-or.org/svn/ADOL-C/trunk/')
+ version('2.6.2', '0f9547584c99c0673e4f81cf64e8d865')
version('2.6.1', '1032b28427d6e399af4610e78c0f087b')
-
+
variant('doc', default=True, description='Install documentation')
variant('openmp', default=False, description='Enable OpenMP support')
variant('sparse', default=False, description='Enable sparse drivers')
- variant('tests', default=True, description='Build all included examples as a test case')
-
- patch('openmp_exam.patch')
+ variant('tests', default=True,
+ description='Build all included examples as a test case')
+
+ patch('openmp_exam_261.patch', when='@2.6.1')
def install(self, spec, prefix):
make_args = ['--prefix=%s' % prefix]
@@ -49,10 +53,14 @@ class AdolC(Package):
if '+openmp' in spec:
if spec.satisfies('%gcc'):
make_args.extend([
- '--with-openmp-flag=-fopenmp' # FIXME: Is this required? -I <path to omp.h> -L <LLVM OpenMP library path>
+ # FIXME: Is this required? -I <path to omp.h> -L <LLVM
+ # OpenMP library path>
+ '--with-openmp-flag=-fopenmp'
])
else:
- raise InstallError("OpenMP flags for compilers other than GCC are not implemented.")
+ raise InstallError(
+ "OpenMP flags for compilers other than GCC "
+ "are not implemented.")
if '+sparse' in spec:
make_args.extend([
@@ -63,7 +71,7 @@ class AdolC(Package):
# whether Adol-C works as expected
if '+tests' in spec:
make_args.extend([
- '--enable-docexa', # Documeted examples
+ '--enable-docexa', # Documented examples
'--enable-addexa' # Additional examples
])
if '+openmp' in spec:
@@ -74,31 +82,36 @@ class AdolC(Package):
configure(*make_args)
make()
make("install")
-
+
# Copy the config.h file, as some packages might require it
source_directory = self.stage.source_path
- config_h = join_path(source_directory,'ADOL-C','src','config.h')
- install(config_h, join_path(prefix.include,'adolc'))
-
+ config_h = join_path(source_directory, 'ADOL-C', 'src', 'config.h')
+ install(config_h, join_path(prefix.include, 'adolc'))
+
# Install documentation to {prefix}/share
if '+doc' in spec:
- install_tree(join_path('ADOL-C','doc'),
- join_path(prefix.share,'doc'))
-
+ install_tree(join_path('ADOL-C', 'doc'),
+ join_path(prefix.share, 'doc'))
+
# Install examples to {prefix}/share
if '+tests' in spec:
- install_tree(join_path('ADOL-C','examples'),
- join_path(prefix.share,'examples'))
-
+ install_tree(join_path('ADOL-C', 'examples'),
+ join_path(prefix.share, 'examples'))
+
# Run some examples that don't require user input
# TODO: Check that bundled examples produce the correct results
- with working_dir(join_path(source_directory,'ADOL-C','examples')):
+ with working_dir(join_path(
+ source_directory, 'ADOL-C', 'examples')):
Executable('./tapeless_scalar')()
Executable('./tapeless_vector')()
-
- with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')):
+
+ with working_dir(join_path(
+ source_directory,
+ 'ADOL-C', 'examples', 'additional_examples')):
Executable('./checkpointing/checkpointing')()
-
+
if '+openmp' in spec:
- with working_dir(join_path(source_directory,'ADOL-C','examples','additional_examples')):
+ with working_dir(join_path(
+ source_directory,
+ 'ADOL-C', 'examples', 'additional_examples')):
Executable('./checkpointing/checkpointing')()
diff --git a/var/spack/repos/builtin/packages/ant/package.py b/var/spack/repos/builtin/packages/ant/package.py
new file mode 100644
index 0000000000..81a0e089e5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ant/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ant(Package):
+ """Apache Ant is a Java library and command-line tool whose mission is to
+ drive processes described in build files as targets and extension points
+ dependent upon each other
+ """
+
+ homepage = "http://ant.apache.org/"
+ url = "http://apache.claz.org/ant/source/apache-ant-1.9.7-src.tar.gz"
+
+ version('1.9.7', 'a2fd9458c76700b7be51ef12f07d4bb1')
+
+ depends_on('jdk')
+
+ def install(self, spec, prefix):
+ env['ANT_HOME'] = self.prefix
+ bash = which('bash')
+ bash('./build.sh', 'install')
diff --git a/var/spack/repos/builtin/packages/antlr/package.py b/var/spack/repos/builtin/packages/antlr/package.py
index eb67facaf3..88653a8ea9 100644
--- a/var/spack/repos/builtin/packages/antlr/package.py
+++ b/var/spack/repos/builtin/packages/antlr/package.py
@@ -24,14 +24,17 @@
##############################################################################
from spack import *
-class Antlr(Package):
-
+
+class Antlr(AutotoolsPackage):
+ """ANTLR (ANother Tool for Language Recognition) is a powerful parser
+ generator for reading, processing, executing, or translating structured
+ text or binary files. It's widely used to build languages, tools, and
+ frameworks. From a grammar, ANTLR generates a parser that can build and
+ walk parse trees."""
+
homepage = "http://www.antlr.org"
url = "https://github.com/antlr/antlr/tarball/v2.7.7"
- # NOTE: This requires that a system Java be available.
- # Spack does not yet know how to install Java compilers
-
# Notes from http://nco.sourceforge.net/#bld
# The first steps to build (i.e., compile, for the most part) NCO from
# source code are to install the pre-requisites: ANTLR version 2.7.7
@@ -41,31 +44,23 @@ class Antlr(Package):
# CharScanner.hpp must include this line: #include <cstring> or else
# ncap2 will not compile (this tarball is already patched).
version('2.7.7', '914865e853fe8e1e61a9f23d045cb4ab',
- # Patched version as described above
- url='http://dust.ess.uci.edu/tmp/antlr-2.7.7.tar.gz')
- # Unpatched version
- # url='http://dust.ess.uci.edu/nco/antlr-2.7.7.tar.gz')
+ # Patched version as described above
+ url='http://dust.ess.uci.edu/tmp/antlr-2.7.7.tar.gz')
+ # Unpatched version
+ # url='http://dust.ess.uci.edu/nco/antlr-2.7.7.tar.gz')
- variant('cxx', default=False, description='Enable ANTLR for C++')
- variant('java', default=False, description='Enable ANTLR for Java')
+ variant('cxx', default=True, description='Enable ANTLR for C++')
+ variant('java', default=False, description='Enable ANTLR for Java')
variant('python', default=False, description='Enable ANTLR for Python')
- variant('csharp', default=False, description='Enable ANTLR for Csharp')
-
- def install(self, spec, prefix):
- # Check for future enabling of variants
- for v in ('+java', '+python', '+csharp'):
- if v in spec:
- raise Error('Illegal variant %s; for now, Spack only knows how to build antlr or antlr+cxx')
+ extends('python', when='+python')
+ depends_on('jdk', type=('build', 'run'), when='+java')
- config_args = [
- '--prefix=%s' % prefix,
- '--%s-cxx' % ('enable' if '+cxx' in spec else 'disable'),
- '--%s-java' % ('enable' if '+java' in spec else 'disable'),
- '--%s-python' % ('enable' if '+python' in spec else 'disable'),
- '--%s-csharp' % ('enable' if '+csharp' in spec else 'disable')]
+ def configure_args(self):
+ spec = self.spec
- # which('autoreconf')('-iv')
- configure(*config_args)
- make()
- make("install")
+ return [
+ '--{0}-cxx'.format('enable' if '+cxx' in spec else 'disable'),
+ '--{0}-java'.format('enable' if '+java' in spec else 'disable'),
+ '--{0}-python'.format('enable' if '+python' in spec else 'disable')
+ ]
diff --git a/var/spack/repos/builtin/packages/ape/package.py b/var/spack/repos/builtin/packages/ape/package.py
new file mode 100644
index 0000000000..48e436804f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ape/package.py
@@ -0,0 +1,62 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ape(Package):
+ """A tool for generating atomic pseudopotentials within a Density-Functional
+ Theory framework"""
+
+ homepage = "http://www.tddft.org/programs/APE/"
+ url = "http://www.tddft.org/programs/APE/sites/default/files/ape-2.2.1.tar.gz"
+
+ version('2.2.1', 'ab81da85bd749c0c136af088c7f9ad58')
+
+ depends_on('gsl')
+ depends_on('libxc')
+
+ def install(self, spec, prefix):
+ args = []
+ args.extend([
+ '--prefix=%s' % prefix,
+ '--with-gsl-prefix=%s' % spec['gsl'].prefix,
+ '--with-libxc-prefix=%s' % spec['libxc'].prefix
+ ])
+
+ # When the preprocessor expands macros (i.e. CFLAGS) defined as quoted
+ # strings, the result may be > 132 chars and gets truncated.
+ # The compiler then sees an unterminated character constant and
+ # produces "Line truncated" errors. To overcome this, add flags to
+ # let the compiler know that the entire line is meaningful.
+ # TODO: For the lack of better approach, assume that clang is mixed
+ # with GNU fortran.
+ if spec.satisfies('%clang') or spec.satisfies('%gcc'):
+ args.extend([
+ 'FCFLAGS=-O2 -ffree-line-length-none'
+ ])
+
+ configure(*args)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/apex/package.py b/var/spack/repos/builtin/packages/apex/package.py
index 20cbd8e8c6..832e10a1ec 100644
--- a/var/spack/repos/builtin/packages/apex/package.py
+++ b/var/spack/repos/builtin/packages/apex/package.py
@@ -25,6 +25,7 @@
from spack import *
from spack.util.environment import *
+
class Apex(Package):
homepage = "http://github.com/khuck/xpress-apex"
url = "http://github.com/khuck/xpress-apex/archive/v0.1.tar.gz"
@@ -33,23 +34,23 @@ class Apex(Package):
depends_on("binutils+libiberty")
depends_on("boost@1.54:")
- depends_on("cmake@2.8.12:")
+ depends_on('cmake@2.8.12:', type='build')
depends_on("activeharmony@4.5:")
depends_on("ompt-openmp")
def install(self, spec, prefix):
- path=get_path("PATH")
+ path = get_path("PATH")
path.remove(spec["binutils"].prefix.bin)
path_set("PATH", path)
with working_dir("build", create=True):
cmake('-DBOOST_ROOT=%s' % spec['boost'].prefix,
- '-DUSE_BFD=TRUE',
- '-DBFD_ROOT=%s' % spec['binutils'].prefix,
- '-DUSE_ACTIVEHARMONY=TRUE',
- '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix,
- '-DUSE_OMPT=TRUE',
- '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix,
- '..', *std_cmake_args)
+ '-DUSE_BFD=TRUE',
+ '-DBFD_ROOT=%s' % spec['binutils'].prefix,
+ '-DUSE_ACTIVEHARMONY=TRUE',
+ '-DACTIVEHARMONY_ROOT=%s' % spec['activeharmony'].prefix,
+ '-DUSE_OMPT=TRUE',
+ '-DOMPT_ROOT=%s' % spec['ompt-openmp'].prefix,
+ '..', *std_cmake_args)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/applewmproto/package.py b/var/spack/repos/builtin/packages/applewmproto/package.py
new file mode 100644
index 0000000000..8d7e360bfb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/applewmproto/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Applewmproto(Package):
+ """Apple Rootless Window Management Extension.
+
+ This extension defines a protocol that allows X window managers
+ to better interact with the Mac OS X Aqua user interface when
+ running X11 in a rootless mode."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/applewmproto"
+ url = "https://www.x.org/archive/individual/proto/applewmproto-1.4.2.tar.gz"
+
+ version('1.4.2', 'ecc8a4424a893ce120f5652dba62e9e6')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/appres/package.py b/var/spack/repos/builtin/packages/appres/package.py
new file mode 100644
index 0000000000..47a9c5bb54
--- /dev/null
+++ b/var/spack/repos/builtin/packages/appres/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Appres(Package):
+ """The appres program prints the resources seen by an application (or
+ subhierarchy of an application) with the specified class and instance
+ names. It can be used to determine which resources a particular
+ program will load."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/appres"
+ url = "https://www.x.org/archive/individual/app/appres-1.0.4.tar.gz"
+
+ version('1.0.4', 'f82aabe6bbb8960781b63c6945bb361b')
+
+ depends_on('libx11')
+ depends_on('libxt')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/apr-util/package.py b/var/spack/repos/builtin/packages/apr-util/package.py
index 05dc670aed..8e01d3bbdd 100644
--- a/var/spack/repos/builtin/packages/apr-util/package.py
+++ b/var/spack/repos/builtin/packages/apr-util/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class AprUtil(Package):
"""Apache Portable Runtime Utility"""
homepage = 'https://apr.apache.org/'
diff --git a/var/spack/repos/builtin/packages/apr/package.py b/var/spack/repos/builtin/packages/apr/package.py
index 398e1c323d..0cd51f52e3 100644
--- a/var/spack/repos/builtin/packages/apr/package.py
+++ b/var/spack/repos/builtin/packages/apr/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Apr(Package):
"""Apache portable runtime."""
homepage = 'https://apr.apache.org/'
diff --git a/var/spack/repos/builtin/packages/armadillo/package.py b/var/spack/repos/builtin/packages/armadillo/package.py
new file mode 100644
index 0000000000..2336da4520
--- /dev/null
+++ b/var/spack/repos/builtin/packages/armadillo/package.py
@@ -0,0 +1,72 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Armadillo(Package):
+ """Armadillo is a high quality linear algebra library (matrix maths)
+ for the C++ language, aiming towards a good balance between speed and
+ ease of use."""
+
+ homepage = "http://arma.sourceforge.net/"
+ url = "http://sourceforge.net/projects/arma/files/armadillo-7.200.1.tar.xz"
+
+ version('7.500.0', '7d316fdf3c3c7ea92b64704180ae315d')
+ version('7.200.2', 'b21585372d67a8876117fd515d8cf0a2')
+ version('7.200.1', 'ed86d6df0058979e107502e1fe3e469e')
+
+ variant('hdf5', default=False, description='Include HDF5 support')
+
+ depends_on('cmake@2.8:', type='build')
+ depends_on('arpack-ng') # old arpack causes undefined symbols
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('superlu@5.2:')
+ depends_on('hdf5', when='+hdf5')
+
+ def install(self, spec, prefix):
+ arpack = find_libraries(['libarpack'], root=spec[
+ 'arpack-ng'].prefix.lib, shared=True)
+ superlu = find_libraries(['libsuperlu'], root=spec[
+ 'superlu'].prefix, shared=False, recurse=True)
+ cmake_args = [
+ # ARPACK support
+ '-DARPACK_LIBRARY={0}'.format(arpack.joined()),
+ # BLAS support
+ '-DBLAS_LIBRARY={0}'.format(spec['blas'].blas_libs.joined()),
+ # LAPACK support
+ '-DLAPACK_LIBRARY={0}'.format(spec['lapack'].lapack_libs.joined()),
+ # SuperLU support
+ '-DSuperLU_INCLUDE_DIR={0}'.format(spec['superlu'].prefix.include),
+ '-DSuperLU_LIBRARY={0}'.format(superlu.joined()),
+ # HDF5 support
+ '-DDETECT_HDF5={0}'.format('ON' if '+hdf5' in spec else 'OFF')
+ ]
+
+ cmake_args.extend(std_cmake_args)
+ cmake('.', *cmake_args)
+
+ make()
+ make('install')
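find_libraries() is how the Armadillo recipe above resolves a dependency's actual library files before passing them to CMake. As a hedged, standalone illustration of the call (the roots below are placeholder paths, not real install prefixes):

    from spack import *

    # Look for a shared libarpack directly in the given directory.
    arpack = find_libraries(['libarpack'], root='/path/to/arpack-ng/lib',
                            shared=True)

    # Look for a static libsuperlu anywhere under the install prefix.
    superlu = find_libraries(['libsuperlu'], root='/path/to/superlu',
                             shared=False, recurse=True)

    # The resulting LibraryList can be rendered as a space-joined string of
    # full paths (handy for CMake cache variables) or as linker flags.
    print(arpack.joined())
    print(superlu.ld_flags)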
diff --git a/var/spack/repos/builtin/packages/arpack-ng/make_install.patch b/var/spack/repos/builtin/packages/arpack-ng/make_install.patch
new file mode 100644
index 0000000000..ad5cffcc19
--- /dev/null
+++ b/var/spack/repos/builtin/packages/arpack-ng/make_install.patch
@@ -0,0 +1,24 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 607d221..50426c3 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -389,3 +389,19 @@ target_link_libraries(bug_1323 arpack ${BLAS_LIBRARIES} ${LAPACK_LIBRARIES})
+ add_test(bug_1323 Tests/bug_1323)
+
+ add_dependencies(check dnsimp_test bug_1315_single bug_1315_double bug_1323)
++
++############################
++# install
++############################
++# 'make install' to the correct location
++install(TARGETS arpack
++ ARCHIVE DESTINATION lib
++ LIBRARY DESTINATION lib
++ RUNTIME DESTINATION bin)
++
++if (MPI)
++ install(TARGETS parpack
++ ARCHIVE DESTINATION lib
++ LIBRARY DESTINATION lib
++ RUNTIME DESTINATION bin)
++endif ()
diff --git a/var/spack/repos/builtin/packages/arpack-ng/package.py b/var/spack/repos/builtin/packages/arpack-ng/package.py
index fcd5171a7d..a1c18d8086 100644
--- a/var/spack/repos/builtin/packages/arpack-ng/package.py
+++ b/var/spack/repos/builtin/packages/arpack-ng/package.py
@@ -26,8 +26,8 @@ from spack import *
class ArpackNg(Package):
- """
- ARPACK-NG is a collection of Fortran77 subroutines designed to solve large scale eigenvalue problems.
+ """ARPACK-NG is a collection of Fortran77 subroutines designed to solve large
+ scale eigenvalue problems.
Important Features:
@@ -38,43 +38,85 @@ class ArpackNg(Package):
Generalized Problems.
* Routines for Banded Matrices - Standard or Generalized Problems.
* Routines for The Singular Value Decomposition.
- * Example driver routines that may be used as templates to implement numerous
- Shift-Invert strategies for all problem types, data types and precision.
+ * Example driver routines that may be used as templates to implement
+ numerous Shift-Invert strategies for all problem types, data types and
+ precision.
- This project is a joint project between Debian, Octave and Scilab in order to
- provide a common and maintained version of arpack.
+ This project is a joint project between Debian, Octave and Scilab in order
+ to provide a common and maintained version of arpack.
- Indeed, no single release has been published by Rice university for the last
- few years and since many software (Octave, Scilab, R, Matlab...) forked it and
- implemented their own modifications, arpack-ng aims to tackle this by providing
- a common repository and maintained versions.
+ Indeed, no single release has been published by Rice University for the
+ last few years and, since many software packages (Octave, Scilab, R,
+ Matlab...) forked it and implemented their own modifications, arpack-ng
+ aims to tackle this by providing a common repository and maintained versions.
arpack-ng is replacing arpack almost everywhere.
"""
+
homepage = 'https://github.com/opencollab/arpack-ng'
url = 'https://github.com/opencollab/arpack-ng/archive/3.3.0.tar.gz'
+ version('3.4.0', 'ae9ca13f2143a7ea280cb0e2fd4bfae4')
version('3.3.0', 'ed3648a23f0a868a43ef44c97a21bad5')
- variant('shared', default=True, description='Enables the build of shared libraries')
- variant('mpi', default=False, description='Activates MPI support')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+ variant('mpi', default=True, description='Activates MPI support')
- # The function pdlamch10 does not set the return variable. This is fixed upstream
+ # The function pdlamch10 does not set the return variable.
+ # This is fixed upstream
# see https://github.com/opencollab/arpack-ng/issues/34
- patch('pdlamch10.patch', when='@3.3:')
+ patch('pdlamch10.patch', when='@3.3.0')
+
+ patch('make_install.patch', when='@3.4.0')
+ patch('parpack_cmake.patch', when='@3.4.0')
depends_on('blas')
depends_on('lapack')
- depends_on('automake')
- depends_on('autoconf')
- depends_on('libtool@2.4.2:')
+ depends_on('automake', when='@3.3.0', type='build')
+ depends_on('autoconf', when='@3.3.0', type='build')
+ depends_on('libtool@2.4.2:', when='@3.3.0', type='build')
+ depends_on('cmake@2.8.6:', when='@3.4.0:', type='build')
depends_on('mpi', when='+mpi')
+ @when('@3.4.0:')
+ def install(self, spec, prefix):
+
+ options = ['-DEXAMPLES=ON']
+ options.extend(std_cmake_args)
+ options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
+
+ # Make sure we use Spack's blas/lapack:
+ lapack_libs = spec['lapack'].lapack_libs.joined(';')
+ blas_libs = spec['blas'].blas_libs.joined(';')
+
+ options.extend([
+ '-DLAPACK_FOUND=true',
+ '-DLAPACK_INCLUDE_DIRS={0}'.format(spec['lapack'].prefix.include),
+ '-DLAPACK_LIBRARIES={0}'.format(lapack_libs),
+ '-DBLAS_FOUND=true',
+ '-DBLAS_INCLUDE_DIRS={0}'.format(spec['blas'].prefix.include),
+ '-DBLAS_LIBRARIES={0}'.format(blas_libs)
+ ])
+
+ if '+mpi' in spec:
+ options.append('-DMPI=ON')
+
+ # TODO: -DINTERFACE64=ON
+
+ if '+shared' in spec:
+ options.append('-DBUILD_SHARED_LIBS=ON')
+
+ cmake('.', *options)
+ make()
+ if self.run_tests:
+ make('test')
+ make('install')
+
+ @when('@3.3.0')
def install(self, spec, prefix):
# Apparently autotools are not bootstrapped
- # TODO: switch to use the CMake build in the next version
- # rather than bootstrapping.
which('libtoolize')()
bootstrap = Executable('./bootstrap')
@@ -83,13 +125,19 @@ class ArpackNg(Package):
if '+mpi' in spec:
options.extend([
'--enable-mpi',
- 'F77=mpif77' #FIXME: avoid hardcoding MPI wrapper names
+ 'F77=%s' % spec['mpi'].mpif77
])
- if '~shared' in spec:
+ options.extend([
+ '--with-blas={0}'.format(spec['blas'].blas_libs.ld_flags),
+ '--with-lapack={0}'.format(spec['lapack'].lapack_libs.ld_flags)
+ ])
+ if '+shared' not in spec:
options.append('--enable-shared=no')
bootstrap()
configure(*options)
make()
+ if self.run_tests:
+ make('check')
make('install')
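The two install() implementations in the arpack-ng recipe coexist because Spack's @when decorator turns them into a multimethod dispatched on the concrete spec. A minimal hedged sketch of the mechanism, with a hypothetical package, versions and checksums:

    from spack import *


    class Switcher(Package):
        """Hypothetical package with version-specific install methods."""

        homepage = "http://www.example.com"
        url = "http://www.example.com/switcher-2.0.tar.gz"

        version('2.0', '0123456789abcdef0123456789abcdef')
        version('1.0', 'fedcba9876543210fedcba9876543210')

        depends_on('cmake@2.8:', when='@2.0:', type='build')

        @when('@2.0:')
        def install(self, spec, prefix):
            # Newer releases ship a CMake build system.
            cmake('.', *std_cmake_args)
            make()
            make('install')

        @when('@:1.9')
        def install(self, spec, prefix):
            # Older releases still use autotools.
            configure('--prefix=%s' % prefix)
            make()
            make('install')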
diff --git a/var/spack/repos/builtin/packages/arpack-ng/parpack_cmake.patch b/var/spack/repos/builtin/packages/arpack-ng/parpack_cmake.patch
new file mode 100644
index 0000000000..9b11bea6ac
--- /dev/null
+++ b/var/spack/repos/builtin/packages/arpack-ng/parpack_cmake.patch
@@ -0,0 +1,18 @@
+diff --git a/CMakeLists.txt b/CMakeLists.txt
+index 607d221..345b7fc 100644
+--- a/CMakeLists.txt
++++ b/CMakeLists.txt
+@@ -113,11 +113,12 @@ set_target_properties(arpack PROPERTIES OUTPUT_NAME arpack${LIBSUFFIX})
+
+ if (MPI)
+ # add_library(parpack SHARED
+- add_library(parpack
++ add_library(parpack
+ ${parpacksrc_STAT_SRCS}
+ ${parpackutil_STAT_SRCS})
+
+ target_link_libraries(parpack ${MPI_Fortran_LIBRARIES})
++ target_link_libraries(parpack arpack)
+ set_target_properties(parpack PROPERTIES OUTPUT_NAME parpack${LIBSUFFIX})
+ endif ()
+
diff --git a/var/spack/repos/builtin/packages/arpack/package.py b/var/spack/repos/builtin/packages/arpack/package.py
index 75158776fe..91b5f06a4a 100644
--- a/var/spack/repos/builtin/packages/arpack/package.py
+++ b/var/spack/repos/builtin/packages/arpack/package.py
@@ -24,12 +24,12 @@
##############################################################################
from spack import *
import os
-import shutil
+
class Arpack(Package):
"""A collection of Fortran77 subroutines designed to solve large scale
- eigenvalue problems.
- """
+ eigenvalue problems."""
+
homepage = "http://www.caam.rice.edu/software/ARPACK/"
url = "http://www.caam.rice.edu/software/ARPACK/SRC/arpack96.tar.gz"
@@ -39,27 +39,35 @@ class Arpack(Package):
depends_on('lapack')
def patch(self):
- # Filter the cray makefile to make a spack one.
- shutil.move('ARMAKES/ARmake.CRAY', 'ARmake.inc')
makefile = FileFilter('ARmake.inc')
- # Be sure to use Spack F77 wrapper
- makefile.filter('^FC.*', 'FC = f77')
- makefile.filter('^FFLAGS.*', 'FFLAGS = -O2 -g')
+ # Section 1: Paths and Libraries
+
+ # Change the build directory
+ makefile.filter('^home.*', 'home = %s' % os.getcwd())
+
+ # Use external BLAS/LAPACK
+ makefile.filter('^BLASdir.*',
+ 'BLASdir = %s' % self.spec['blas'].prefix)
+ makefile.filter('^LAPACKdir.*',
+ 'LAPACKdir = %s' % self.spec['lapack'].prefix)
+
+ # Do not include the platform in the library name
+ makefile.filter('^PLAT.*', 'PLAT = ')
+ makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = $(home)/libarpack.a')
- # Set up some variables.
- makefile.filter('^PLAT.*', 'PLAT = ')
- makefile.filter('^home.*', 'home = %s' % os.getcwd())
- makefile.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix)
- makefile.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix)
+ # Section 2: Compilers
- # build the library in our own prefix.
- makefile.filter('^ARPACKLIB.*', 'ARPACKLIB = %s/libarpack.a' % os.getcwd())
+ # Be sure to use the Spack compiler wrapper
+ makefile.filter('^FC.*', 'FC = {0}'.format(os.environ['F77']))
+ makefile.filter('^FFLAGS.*', 'FFLAGS = -O2 -g -fPIC')
+ if not which('ranlib'):
+ makefile.filter('^RANLIB.*', 'RANLIB = touch')
def install(self, spec, prefix):
with working_dir('SRC'):
make('all')
- mkdirp(prefix.lib)
+ mkdir(prefix.lib)
install('libarpack.a', prefix.lib)
diff --git a/var/spack/repos/builtin/packages/asciidoc/package.py b/var/spack/repos/builtin/packages/asciidoc/package.py
index a846e0ba65..552030d965 100644
--- a/var/spack/repos/builtin/packages/asciidoc/package.py
+++ b/var/spack/repos/builtin/packages/asciidoc/package.py
@@ -24,9 +24,11 @@
##############################################################################
from spack import *
+
class Asciidoc(Package):
- """ A presentable text document format for writing articles, UNIX man
+ """A presentable text document format for writing articles, UNIX man
pages and other small to medium sized documents."""
+
homepage = "http://asciidoc.org"
url = "http://downloads.sourceforge.net/project/asciidoc/asciidoc/8.6.9/asciidoc-8.6.9.tar.gz"
@@ -34,6 +36,8 @@ class Asciidoc(Package):
depends_on('libxml2')
depends_on('libxslt')
+ depends_on('docbook-xml')
+ depends_on('docbook-xsl')
def install(self, spec, prefix):
configure('--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/astra/package.py b/var/spack/repos/builtin/packages/astra/package.py
new file mode 100644
index 0000000000..e32e70cada
--- /dev/null
+++ b/var/spack/repos/builtin/packages/astra/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Astra(Package):
+ """A Space Charge Tracking Algorithm."""
+
+ homepage = "http://www.desy.de/~mpyflo/"
+
+ version('2016-11-30', '17135b7a4adbacc1843a50a6a2ae2c25', expand=False,
+ url='http://www.desy.de/~mpyflo/Astra_for_64_Bit_Linux/Astra')
+
+ def install(self, spec, prefix):
+ mkdir(prefix.bin)
+ install('Astra', prefix.bin)
+
+ chmod = which('chmod')
+ chmod('+x', join_path(prefix.bin, 'Astra'))
diff --git a/var/spack/repos/builtin/packages/astyle/package.py b/var/spack/repos/builtin/packages/astyle/package.py
index cd6f1d04f1..16c59469fa 100644
--- a/var/spack/repos/builtin/packages/astyle/package.py
+++ b/var/spack/repos/builtin/packages/astyle/package.py
@@ -23,30 +23,32 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import sys
-class Astyle(Package):
- """
- A Free, Fast, and Small Automatic Formatter for C, C++, C++/CLI,
+class Astyle(MakefilePackage):
+ """A Free, Fast, and Small Automatic Formatter for C, C++, C++/CLI,
Objective-C, C#, and Java Source Code.
"""
+
homepage = "http://astyle.sourceforge.net/"
- url = "http://downloads.sourceforge.net/project/astyle/astyle/astyle%202.04/astyle_2.04_linux.tar.gz"
+ url = "http://downloads.sourceforge.net/project/astyle/astyle/astyle%202.04/astyle_2.04_linux.tar.gz"
+ version('2.05.1', '4142d178047d7040da3e0e2f1b030a1a')
version('2.04', '30b1193a758b0909d06e7ee8dd9627f6')
- def install(self, spec, prefix):
+ parallel = False
- with working_dir('src'):
- # we need to edit the makefile in place to set compiler:
- make_file = join_path(self.stage.source_path,
- 'build', 'gcc', 'Makefile')
- filter_file(r'^CXX\s*=.*', 'CXX=%s'.format(spack_cxx), make_file)
+ def build_directory(self):
+ return join_path(self.stage.source_path, 'build', self.compiler.name)
- make('-f',
- make_file,
- parallel=False)
+ def edit(self, spec, prefix):
+ makefile = join_path(self.build_directory(), 'Makefile')
+ filter_file(r'^CXX\s*=.*', 'CXX=%s' % spack_cxx, makefile)
+        # strangely enough install -o $(USER) -g $(USER) stopped working on OSX
+ if sys.platform == 'darwin':
+ filter_file(r'^INSTALL=.*', 'INSTALL=install', makefile)
- mkdirp(self.prefix.bin)
- install(join_path(self.stage.source_path, 'src', 'bin', 'astyle'),
- self.prefix.bin)
+ @property
+ def install_targets(self):
+ return ['install', 'prefix={0}'.format(self.prefix)]
diff --git a/var/spack/repos/builtin/packages/atk/package.py b/var/spack/repos/builtin/packages/atk/package.py
index 1d26145fd1..0a7d48774d 100644
--- a/var/spack/repos/builtin/packages/atk/package.py
+++ b/var/spack/repos/builtin/packages/atk/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Atk(Package):
"""ATK provides the set of accessibility interfaces that are
implemented by other toolkits and applications. Using the ATK
@@ -32,9 +33,16 @@ class Atk(Package):
homepage = "https://developer.gnome.org/atk/"
url = "http://ftp.gnome.org/pub/gnome/sources/atk/2.14/atk-2.14.0.tar.xz"
+ version('2.20.0', '5187b0972f4d3905f285540b31395e20')
version('2.14.0', 'ecb7ca8469a5650581b1227d78051b8b')
- depends_on("glib")
+ depends_on('glib')
+ depends_on('pkg-config', type='build')
+
+ def url_for_version(self, version):
+ """Handle atk's version-based custom URLs."""
+ url = 'http://ftp.gnome.org/pub/gnome/sources/atk'
+ return url + '/%s/atk-%s.tar.xz' % (version.up_to(2), version)
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
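
For illustration, the url_for_version hook above assembles GNOME-style download URLs from the first two version components; a quick check of what it produces, assuming Spack's Version class is importable:

    from spack.version import Version

    version = Version('2.20.0')
    url = 'http://ftp.gnome.org/pub/gnome/sources/atk'
    # version.up_to(2) renders as '2.20', so this prints:
    # http://ftp.gnome.org/pub/gnome/sources/atk/2.20/atk-2.20.0.tar.xz
    print(url + '/%s/atk-%s.tar.xz' % (version.up_to(2), version))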
diff --git a/var/spack/repos/builtin/packages/atlas/package.py b/var/spack/repos/builtin/packages/atlas/package.py
index c43d92c34f..51f0fced2b 100644
--- a/var/spack/repos/builtin/packages/atlas/package.py
+++ b/var/spack/repos/builtin/packages/atlas/package.py
@@ -23,20 +23,27 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+from spack.package_test import *
from spack.util.executable import Executable
import os.path
+
class Atlas(Package):
- """
- Automatically Tuned Linear Algebra Software, generic shared ATLAS is an approach for the automatic generation and
- optimization of numerical software. Currently ATLAS supplies optimized versions for the complete set of linear
- algebra kernels known as the Basic Linear Algebra Subroutines (BLAS), and a subset of the linear algebra routines
- in the LAPACK library.
+    """Automatically Tuned Linear Algebra Software, generic shared. ATLAS is an
+ approach for the automatic generation and optimization of numerical
+ software. Currently ATLAS supplies optimized versions for the complete set
+ of linear algebra kernels known as the Basic Linear Algebra Subroutines
+ (BLAS), and a subset of the linear algebra routines in the LAPACK library.
"""
homepage = "http://math-atlas.sourceforge.net/"
+ version('3.10.3', 'd6ce4f16c2ad301837cfb3dade2f7cef',
+ url='https://sourceforge.net/projects/math-atlas/files/Stable/3.10.3/atlas3.10.3.tar.bz2')
+
version('3.10.2', 'a4e21f343dec8f22e7415e339f09f6da',
- url='http://downloads.sourceforge.net/project/math-atlas/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True)
+ url='https://sourceforge.net/projects/math-atlas/files/Stable/3.10.2/atlas3.10.2.tar.bz2', preferred=True)
+    # Not all packages (e.g. Trilinos@12.6.3) have stopped using LAPACK
+    # routines deprecated in 3.6.0. Stick with 3.5.0 until this is fixed.
resource(name='lapack',
url='http://www.netlib.org/lapack/lapack-3.5.0.tgz',
md5='b1d3e3e425b2e44a06760ff173104bdf',
@@ -44,9 +51,10 @@ class Atlas(Package):
when='@3:')
version('3.11.34', '0b6c5389c095c4c8785fd0f724ec6825',
- url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2/download')
+ url='http://sourceforge.net/projects/math-atlas/files/Developer%20%28unstable%29/3.11.34/atlas3.11.34.tar.bz2')
variant('shared', default=True, description='Builds shared library')
+ variant('pthread', default=False, description='Use multithreaded libraries')
provides('blas')
provides('lapack')
@@ -63,16 +71,32 @@ class Atlas(Package):
# TODO: using, say, MSRs. Or move this to a variant.
def install(self, spec, prefix):
-
+ # reference to other package managers
+ # https://github.com/hpcugent/easybuild-easyblocks/blob/master/easybuild/easyblocks/a/atlas.py
+ # https://github.com/macports/macports-ports/blob/master/math/atlas/Portfile
+ # https://github.com/Homebrew/homebrew-science/pull/3571
options = []
if '+shared' in spec:
- options.append('--shared')
+ options.extend([
+ '--shared'
+ ])
+ # TODO: for non GNU add '-Fa', 'alg', '-fPIC' ?
+
+ # configure for 64-bit build
+ options.extend([
+ '-b', '64'
+ ])
+
+ # set compilers:
+ options.extend([
+ '-C', 'ic', spack_cc,
+ '-C', 'if', spack_f77
+ ])
- # Lapack resource
- lapack_stage = self.stage[1]
- lapack_tarfile = os.path.basename(lapack_stage.fetcher.url)
- lapack_tarfile_path = join_path(lapack_stage.path, lapack_tarfile)
- options.append('--with-netlib-lapack-tarfile=%s' % lapack_tarfile_path)
+ # Lapack resource to provide full lapack build. Note that
+ # ATLAS only provides a few LAPACK routines natively.
+ options.append('--with-netlib-lapack-tarfile=%s' %
+ self.stage[1].archive_file)
with working_dir('spack-build', create=True):
configure = Executable('../configure')
@@ -81,4 +105,48 @@ class Atlas(Package):
make('check')
make('ptcheck')
make('time')
+ if '+shared' in spec:
+ with working_dir('lib'):
+ make('shared_all')
+
make("install")
+ self.install_test()
+
+ @property
+ def blas_libs(self):
+        # libsatlas.[so,dylib,dll] contains all serial APIs (serial LAPACK,
+        # serial BLAS) and all ATLAS symbols needed to support them, whereas
+        # libtatlas.[so,dylib,dll] is the parallel (multithreaded) version.
+ is_threaded = '+pthread' in self.spec
+ if '+shared' in self.spec:
+ to_find = ['libtatlas'] if is_threaded else ['libsatlas']
+ shared = True
+ else:
+ interfaces = [
+ 'libptcblas',
+ 'libptf77blas'
+ ] if is_threaded else [
+ 'libcblas',
+ 'libf77blas'
+ ]
+ to_find = ['liblapack'] + interfaces + ['libatlas']
+ shared = False
+ return find_libraries(
+ to_find, root=self.prefix, shared=shared, recurse=True
+ )
+
+ @property
+ def lapack_libs(self):
+ return self.blas_libs
+
+ def install_test(self):
+ source_file = join_path(os.path.dirname(self.module.__file__),
+ 'test_cblas_dgemm.c')
+ blessed_file = join_path(os.path.dirname(self.module.__file__),
+ 'test_cblas_dgemm.output')
+
+ include_flags = ["-I%s" % self.spec.prefix.include]
+ link_flags = self.lapack_libs.ld_flags.split()
+
+ output = compile_c_and_execute(source_file, include_flags, link_flags)
+ compare_output_file(output, blessed_file)
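
The blas_libs and lapack_libs properties above return a Spack LibraryList. A short sketch of how a dependent package typically consumes it, assuming spec is the dependent's spec (the atompaw hunk further down in this diff follows the same pattern):

    # Combine the providers' LibraryLists and render them as linker flags.
    linalg = spec['lapack'].lapack_libs + spec['blas'].blas_libs
    configure_options = [
        # ld_flags expands to something like '-L<prefix>/lib -lsatlas'
        '--with-linalg-libs=%s' % linalg.ld_flags,
    ]
    include_flags = ['-I%s' % spec['lapack'].prefix.include]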
diff --git a/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c
new file mode 100644
index 0000000000..2cb90fb883
--- /dev/null
+++ b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.c
@@ -0,0 +1,49 @@
+#include <cblas.h>
+#include <stdio.h>
+
+double m[] = {
+ 3, 1, 3,
+ 1, 5, 9,
+ 2, 6, 5
+};
+
+double x[] = {
+ -1, 3, -3
+};
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+ void dgesv_(int *n, int *nrhs, double *a, int *lda,
+ int *ipivot, double *b, int *ldb, int *info);
+
+#ifdef __cplusplus
+}
+#endif
+
+int main(void) {
+ int i;
+ // blas:
+ double A[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
+ double B[6] = {1.0, 2.0, 1.0, -3.0, 4.0, -1.0};
+ double C[9] = {.5, .5, .5, .5, .5, .5, .5, .5, .5};
+ cblas_dgemm(CblasColMajor, CblasNoTrans, CblasTrans,
+ 3, 3, 2, 1, A, 3, B, 3, 2, C, 3);
+ for (i = 0; i < 9; i++)
+ printf("%f\n", C[i]);
+
+ // lapack:
+ int ipiv[3];
+ int j;
+ int info;
+ int n = 1;
+ int nrhs = 1;
+ int lda = 3;
+ int ldb = 3;
+ dgesv_(&n,&nrhs, &m[0], &lda, ipiv, &x[0], &ldb, &info);
+ for (i=0; i<3; ++i)
+ printf("%5.1f\n", x[i]);
+
+ return 0;
+}
diff --git a/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output
new file mode 100644
index 0000000000..01404462c4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/atlas/test_cblas_dgemm.output
@@ -0,0 +1,12 @@
+11.000000
+-9.000000
+5.000000
+-9.000000
+21.000000
+-1.000000
+5.000000
+-1.000000
+3.000000
+ -0.3
+ 3.0
+ -3.0
diff --git a/var/spack/repos/builtin/packages/atompaw/package.py b/var/spack/repos/builtin/packages/atompaw/package.py
new file mode 100644
index 0000000000..17d0ef8209
--- /dev/null
+++ b/var/spack/repos/builtin/packages/atompaw/package.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Atompaw(Package):
+ """A Projector Augmented Wave (PAW) code for generating
+ atom-centered functions.
+
+ Official website: http://pwpaw.wfu.edu
+
+ User's guide: ~/doc/atompaw-usersguide.pdf
+ """
+ homepage = "http://users.wfu.edu/natalie/papers/pwpaw/man.html"
+ url = "http://users.wfu.edu/natalie/papers/pwpaw/atompaw-4.0.0.13.tar.gz"
+
+ version('4.0.0.13', 'af4a042380356f6780183c4b325aad1d')
+ version('3.1.0.3', 'c996a277e11707887177f47bbb229aa6')
+
+ depends_on("lapack")
+ depends_on("blas")
+
+ # pin libxc version
+ depends_on("libxc@2.2.1")
+
+ def install(self, spec, prefix):
+ options = ['--prefix=%s' % prefix]
+
+ linalg = spec['lapack'].lapack_libs + spec['blas'].blas_libs
+ options.extend([
+ "--with-linalg-libs=%s" % linalg.ld_flags,
+ "--enable-libxc",
+ "--with-libxc-incs=-I%s" % spec["libxc"].prefix.include,
+ "--with-libxc-libs=-L%s -lxcf90 -lxc" % spec["libxc"].prefix.lib,
+ ])
+
+ configure(*options)
+ make(parallel=False) # parallel build fails
+ make("check")
+ make("install")
diff --git a/var/spack/repos/builtin/packages/atop/package.py b/var/spack/repos/builtin/packages/atop/package.py
index 9cacafc634..e3a9d464a9 100644
--- a/var/spack/repos/builtin/packages/atop/package.py
+++ b/var/spack/repos/builtin/packages/atop/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Atop(Package):
"""Atop is an ASCII full-screen performance monitor for Linux"""
homepage = "http://www.atoptool.nl/index.php"
@@ -37,4 +38,4 @@ class Atop(Package):
install("atop", join_path(prefix.bin, "atop"))
mkdirp(join_path(prefix.man, "man1"))
install(join_path("man", "atop.1"),
- join_path(prefix.man, "man1", "atop.1"))
+ join_path(prefix.man, "man1", "atop.1"))
diff --git a/var/spack/repos/builtin/packages/autoconf/package.py b/var/spack/repos/builtin/packages/autoconf/package.py
index d920855a2f..d812350ae8 100644
--- a/var/spack/repos/builtin/packages/autoconf/package.py
+++ b/var/spack/repos/builtin/packages/autoconf/package.py
@@ -24,18 +24,32 @@
##############################################################################
from spack import *
-class Autoconf(Package):
+
+class Autoconf(AutotoolsPackage):
"""Autoconf -- system configuration part of autotools"""
- homepage = "https://www.gnu.org/software/autoconf/"
- url = "http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz"
+
+ homepage = 'https://www.gnu.org/software/autoconf/'
+ url = 'http://ftp.gnu.org/gnu/autoconf/autoconf-2.69.tar.gz'
version('2.69', '82d05e03b93e45f5a39b828dc9c6c29b')
version('2.62', '6c1f3b3734999035d77da5024aab4fbd')
+ version('2.59', 'd4d45eaa1769d45e59dcb131a4af17a0')
+ version('2.13', '9de56d4a161a723228220b0f425dc711')
- depends_on("m4")
+ depends_on('m4@1.4.6:', type='build')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ def _make_executable(self, name):
+ return Executable(join_path(self.prefix.bin, name))
- make()
- make("install")
+ def setup_dependent_package(self, module, dependent_spec):
+ # Autoconf is very likely to be a build dependency,
+ # so we add the tools it provides to the dependent module
+ executables = ['autoconf',
+ 'autoheader',
+ 'autom4te',
+ 'autoreconf',
+ 'autoscan',
+ 'autoupdate',
+ 'ifnames']
+ for name in executables:
+ setattr(module, name, self._make_executable(name))
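
Because setup_dependent_package() above injects autoconf's tools into the dependent package's module namespace, any package declaring the build dependency can call them by name. A minimal sketch with a hypothetical package (the bash-completion hunk below does the same thing with autoreconf):

    from spack import *


    class HypotheticalAutotoolsUser(Package):
        """Hypothetical consumer of the injected autotools executables."""

        depends_on('autoconf', type='build')
        depends_on('automake', type='build')
        depends_on('libtool', type='build')

        def install(self, spec, prefix):
            # 'autoreconf' is provided by autoconf's
            # setup_dependent_package() shown above.
            autoreconf('--install', '--force')
            configure('--prefix=%s' % prefix)
            make()
            make('install')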
diff --git a/var/spack/repos/builtin/packages/automaded/package.py b/var/spack/repos/builtin/packages/automaded/package.py
index fc65a04a06..7e586b2991 100644
--- a/var/spack/repos/builtin/packages/automaded/package.py
+++ b/var/spack/repos/builtin/packages/automaded/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Automaded(Package):
"""AutomaDeD (Automata-based Debugging for Dissimilar parallel
tasks) is a tool for automatic diagnosis of performance and
@@ -44,6 +45,7 @@ class Automaded(Package):
depends_on('mpi')
depends_on('boost')
depends_on('callpath')
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake("-DSTATE_TRACKER_WITH_CALLPATH=ON", *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/automake/package.py b/var/spack/repos/builtin/packages/automake/package.py
index 331b364496..6c0a47ff95 100644
--- a/var/spack/repos/builtin/packages/automake/package.py
+++ b/var/spack/repos/builtin/packages/automake/package.py
@@ -24,19 +24,25 @@
##############################################################################
from spack import *
-class Automake(Package):
+
+class Automake(AutotoolsPackage):
"""Automake -- make file builder part of autotools"""
- homepage = "http://www.gnu.org/software/automake/"
- url = "http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz"
+
+ homepage = 'http://www.gnu.org/software/automake/'
+ url = 'http://ftp.gnu.org/gnu/automake/automake-1.14.tar.gz'
version('1.15', '716946a105ca228ab545fc37a70df3a3')
version('1.14.1', 'd052a3e884631b9c7892f2efce542d75')
version('1.11.6', '0286dc30295b62985ca51919202ecfcc')
- depends_on('autoconf')
+ depends_on('autoconf', type='build')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ def _make_executable(self, name):
+ return Executable(join_path(self.prefix.bin, name))
- make()
- make("install")
+ def setup_dependent_package(self, module, dependent_spec):
+ # Automake is very likely to be a build dependency,
+ # so we add the tools it provides to the dependent module
+ executables = ['aclocal', 'automake']
+ for name in executables:
+ setattr(module, name, self._make_executable(name))
diff --git a/var/spack/repos/builtin/packages/bamtools/package.py b/var/spack/repos/builtin/packages/bamtools/package.py
new file mode 100644
index 0000000000..7bb1985003
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bamtools/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bamtools(Package):
+ """C++ API & command-line toolkit for working with BAM data."""
+
+ homepage = "https://github.com/pezmaster31/bamtools"
+ url = "https://github.com/pezmaster31/bamtools/archive/v2.4.0.tar.gz"
+
+ version('2.4.0', '6139d00c1b1fe88fe15d094d8a74d8b9')
+ version('2.3.0', 'd327df4ba037d6eb8beef65d7da75ebc')
+ version('2.2.3', '6eccd3e45e4ba12a68daa3298998e76d')
+
+ depends_on('cmake', type='build')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake('..', *std_cmake_args)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bash-completion/package.py b/var/spack/repos/builtin/packages/bash-completion/package.py
new file mode 100644
index 0000000000..666a1bef13
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bash-completion/package.py
@@ -0,0 +1,64 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class BashCompletion(Package):
+ """Programmable completion functions for bash."""
+ homepage = "https://github.com/scop/bash-completion"
+ url = "https://github.com/scop/bash-completion/archive/2.3.tar.gz"
+
+ version('2.3', '67e50f5f3c804350b43f2b664c33dde811d24292')
+ version('develop', git='https://github.com/scop/bash-completion.git')
+
+ # Build dependencies
+ depends_on('automake', type='build')
+ depends_on('autoconf', type='build')
+ depends_on('libtool', type='build')
+
+ # Other dependencies
+ depends_on('bash@4.1:', type='run')
+
+ def install(self, spec, prefix):
+ make_args = ['--prefix=%s' % prefix]
+
+ autoreconf('-i')
+ configure(*make_args)
+ make()
+ # make("check") # optional, requires dejagnu and tcllib
+ make("install",
+ parallel=False)
+
+ # Guidelines for individual user as provided by the author at
+ # https://github.com/scop/bash-completion
+ print('=====================================================')
+ print('Bash completion has been installed. To use it, please')
+ print('include the following lines in your ~/.bash_profile :')
+ print('')
+ print('# Use bash-completion, if available')
+ print('[[ $PS1 && -f %s/share/bash-completion/bash_completion ]] && \ ' % prefix) # NOQA: ignore=E501
+ print(' . %s/share/bash-completion/bash_completion' % prefix)
+ print('')
+ print('=====================================================')
diff --git a/var/spack/repos/builtin/packages/bash/package.py b/var/spack/repos/builtin/packages/bash/package.py
index 5820595be9..e0cd114635 100644
--- a/var/spack/repos/builtin/packages/bash/package.py
+++ b/var/spack/repos/builtin/packages/bash/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Bash(Package):
"""The GNU Project's Bourne Again SHell."""
diff --git a/var/spack/repos/builtin/packages/bazel/cc_configure.patch b/var/spack/repos/builtin/packages/bazel/cc_configure.patch
new file mode 100644
index 0000000000..3e108c3b5e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bazel/cc_configure.patch
@@ -0,0 +1,28 @@
+--- bazel-0.3.1/tools/cpp/cc_configure.bzl 2016-10-13 14:00:32.118358387 +0200
++++ bazel-0.3.1/tools/cpp/cc_configure.bzl 2016-10-13 13:52:45.342610147 +0200
+@@ -173,8 +173,23 @@
+ else:
+ inc_dirs = result.stderr[index1 + 1:index2].strip()
+
+- return [repository_ctx.path(_cxx_inc_convert(p))
+- for p in inc_dirs.split("\n")]
++ default_inc_directories = [
++ repository_ctx.path(_cxx_inc_convert(p))
++ for p in inc_dirs.split("\n")
++ ]
++
++ env = repository_ctx.os.environ
++ if "SPACK_DEPENDENCIES" in env:
++ for dep in env["SPACK_DEPENDENCIES"].split(":"):
++ path = dep + "/include"
++ # path = repository_ctx.os.path.join(dep, "include")
++ # if not repository_ctx.os.path.exists(path):
++ # continue
++ default_inc_directories.append(
++ repository_ctx.path(_cxx_inc_convert(path))
++ )
++
++ return default_inc_directories
+
+ def _add_option_if_supported(repository_ctx, cc, option):
+ """Checks that `option` is supported by the C compiler."""
diff --git a/var/spack/repos/builtin/packages/bazel/fix_env_handling.patch b/var/spack/repos/builtin/packages/bazel/fix_env_handling.patch
new file mode 100644
index 0000000000..9be9f97d69
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bazel/fix_env_handling.patch
@@ -0,0 +1,119 @@
+diff -pu bazel-0.3.1/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java bazel-0.3.1/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java
+--- bazel-0.3.1/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java 2016-09-14 11:56:01.565756979 +0200
++++ bazel-0.3.1/src/main/java/com/google/devtools/build/lib/bazel/rules/BazelConfiguration.java 2016-09-14 12:04:13.292839801 +0200
+@@ -92,5 +92,115 @@ public class BazelConfiguration extends
+ if (tmpdir != null) {
+ builder.put("TMPDIR", tmpdir);
+ }
++
++ String spack_prefix = System.getenv("SPACK_PREFIX");
++ if (spack_prefix != null) {
++ builder.put("SPACK_PREFIX", spack_prefix);
++ }
++
++ String spack_env_path = System.getenv("SPACK_ENV_PATH");
++ if (spack_env_path != null) {
++ builder.put("SPACK_ENV_PATH", spack_env_path);
++ }
++
++ String spack_debug_log_dir = System.getenv("SPACK_DEBUG_LOG_DIR");
++ if (spack_debug_log_dir != null) {
++ builder.put("SPACK_DEBUG_LOG_DIR", spack_debug_log_dir);
++ }
++
++ String spack_compiler_spec = System.getenv("SPACK_COMPILER_SPEC");
++ if (spack_compiler_spec != null) {
++ builder.put("SPACK_COMPILER_SPEC", spack_compiler_spec);
++ }
++
++ String spack_cc_rpath_arg = System.getenv("SPACK_CC_RPATH_ARG");
++ if (spack_cc_rpath_arg != null) {
++ builder.put("SPACK_CC_RPATH_ARG", spack_cc_rpath_arg);
++ }
++
++ String spack_cxx_rpath_arg = System.getenv("SPACK_CXX_RPATH_ARG");
++ if (spack_cxx_rpath_arg != null) {
++ builder.put("SPACK_CXX_RPATH_ARG", spack_cxx_rpath_arg);
++ }
++
++ String spack_f77_rpath_arg = System.getenv("SPACK_F77_RPATH_ARG");
++ if (spack_f77_rpath_arg != null) {
++ builder.put("SPACK_F77_RPATH_ARG", spack_f77_rpath_arg);
++ }
++
++ String spack_fc_rpath_arg = System.getenv("SPACK_FC_RPATH_ARG");
++ if (spack_fc_rpath_arg != null) {
++ builder.put("SPACK_FC_RPATH_ARG", spack_fc_rpath_arg);
++ }
++
++ String spack_short_spec = System.getenv("SPACK_SHORT_SPEC");
++ if (spack_short_spec != null) {
++ builder.put("SPACK_SHORT_SPEC", spack_short_spec);
++ }
++
++ String spack_cc = System.getenv("SPACK_CC");
++ if (spack_cc != null) {
++ builder.put("SPACK_CC", spack_cc);
++ }
++
++ String spack_cxx = System.getenv("SPACK_CXX");
++ if (spack_cxx != null) {
++ builder.put("SPACK_CXX", spack_cxx);
++ }
++
++ String spack_f77 = System.getenv("SPACK_F77");
++ if (spack_f77 != null) {
++ builder.put("SPACK_F77", spack_f77);
++ }
++
++ String spack_fc = System.getenv("SPACK_FC");
++ if (spack_fc != null) {
++ builder.put("SPACK_FC", spack_fc);
++ }
++
++ String spack_cflags = System.getenv("SPACK_CFLAGS");
++ if (spack_cflags != null) {
++ builder.put("SPACK_CFLAGS", spack_cflags);
++ }
++
++ String spack_cxxflags = System.getenv("SPACK_CXXFLAGS");
++ if (spack_cxxflags != null) {
++ builder.put("SPACK_CXXFLAGS", spack_cxxflags);
++ }
++
++ String spack_fcflags = System.getenv("SPACK_FCFLAGS");
++ if (spack_fcflags != null) {
++ builder.put("SPACK_FCFLAGS", spack_fcflags);
++ }
++
++ String spack_fflags = System.getenv("SPACK_FFLAGS");
++ if (spack_fflags != null) {
++ builder.put("SPACK_FFLAGS", spack_fflags);
++ }
++
++ String spack_ldflags = System.getenv("SPACK_LDFLAGS");
++ if (spack_ldflags != null) {
++ builder.put("SPACK_LDFLAGS", spack_ldflags);
++ }
++
++ String spack_ldlibs = System.getenv("SPACK_LDLIBS");
++ if (spack_ldlibs != null) {
++ builder.put("SPACK_LDLIBS", spack_ldlibs);
++ }
++
++ String spack_debug = System.getenv("SPACK_DEBUG");
++ if (spack_debug != null) {
++ builder.put("SPACK_DEBUG", spack_debug);
++ }
++
++ String spack_test_command = System.getenv("SPACK_TEST_COMMAND");
++ if (spack_test_command != null) {
++ builder.put("SPACK_TEST_COMMAND", spack_test_command);
++ }
++
++ String spack_dependencies = System.getenv("SPACK_DEPENDENCIES");
++ if (spack_dependencies != null) {
++ builder.put("SPACK_DEPENDENCIES", spack_dependencies);
++ }
+ }
+ }
diff --git a/var/spack/repos/builtin/packages/bazel/link.patch b/var/spack/repos/builtin/packages/bazel/link.patch
new file mode 100644
index 0000000000..69d100f62c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bazel/link.patch
@@ -0,0 +1,133 @@
+--- bazel-0.3.1/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkAction.java 2016-07-29 10:22:16.000000000 +0200
++++ bazel-0.3.1/src/main/java/com/google/devtools/build/lib/rules/cpp/CppLinkAction.java 2016-10-13 15:21:35.036617890 +0200
+@@ -214,6 +214,130 @@
+ .getParentDirectory()
+ .getPathString());
+ }
++
++ String path = System.getenv("PATH");
++ result.put("PATH", path == null ? "/bin:/usr/bin" : path);
++
++ String ldLibraryPath = System.getenv("LD_LIBRARY_PATH");
++ if (ldLibraryPath != null) {
++ result.put("LD_LIBRARY_PATH", ldLibraryPath);
++ }
++
++ String tmpdir = System.getenv("TMPDIR");
++ if (tmpdir != null) {
++ result.put("TMPDIR", tmpdir);
++ }
++
++ String spack_prefix = System.getenv("SPACK_PREFIX");
++ if (spack_prefix != null) {
++ result.put("SPACK_PREFIX", spack_prefix);
++ }
++
++ String spack_env_path = System.getenv("SPACK_ENV_PATH");
++ if (spack_env_path != null) {
++ result.put("SPACK_ENV_PATH", spack_env_path);
++ }
++
++ String spack_debug_log_dir = System.getenv("SPACK_DEBUG_LOG_DIR");
++ if (spack_debug_log_dir != null) {
++ result.put("SPACK_DEBUG_LOG_DIR", spack_debug_log_dir);
++ }
++
++ String spack_compiler_spec = System.getenv("SPACK_COMPILER_SPEC");
++ if (spack_compiler_spec != null) {
++ result.put("SPACK_COMPILER_SPEC", spack_compiler_spec);
++ }
++
++ String spack_cc_rpath_arg = System.getenv("SPACK_CC_RPATH_ARG");
++ if (spack_cc_rpath_arg != null) {
++ result.put("SPACK_CC_RPATH_ARG", spack_cc_rpath_arg);
++ }
++
++ String spack_cxx_rpath_arg = System.getenv("SPACK_CXX_RPATH_ARG");
++ if (spack_cxx_rpath_arg != null) {
++ result.put("SPACK_CXX_RPATH_ARG", spack_cxx_rpath_arg);
++ }
++
++ String spack_f77_rpath_arg = System.getenv("SPACK_F77_RPATH_ARG");
++ if (spack_f77_rpath_arg != null) {
++ result.put("SPACK_F77_RPATH_ARG", spack_f77_rpath_arg);
++ }
++
++ String spack_fc_rpath_arg = System.getenv("SPACK_FC_RPATH_ARG");
++ if (spack_fc_rpath_arg != null) {
++ result.put("SPACK_FC_RPATH_ARG", spack_fc_rpath_arg);
++ }
++
++ String spack_short_spec = System.getenv("SPACK_SHORT_SPEC");
++ if (spack_short_spec != null) {
++ result.put("SPACK_SHORT_SPEC", spack_short_spec);
++ }
++
++ String spack_cc = System.getenv("SPACK_CC");
++ if (spack_cc != null) {
++ result.put("SPACK_CC", spack_cc);
++ }
++
++ String spack_cxx = System.getenv("SPACK_CXX");
++ if (spack_cxx != null) {
++ result.put("SPACK_CXX", spack_cxx);
++ }
++
++ String spack_f77 = System.getenv("SPACK_F77");
++ if (spack_f77 != null) {
++ result.put("SPACK_F77", spack_f77);
++ }
++
++ String spack_fc = System.getenv("SPACK_FC");
++ if (spack_fc != null) {
++ result.put("SPACK_FC", spack_fc);
++ }
++
++ String spack_cflags = System.getenv("SPACK_CFLAGS");
++ if (spack_cflags != null) {
++ result.put("SPACK_CFLAGS", spack_cflags);
++ }
++
++ String spack_cxxflags = System.getenv("SPACK_CXXFLAGS");
++ if (spack_cxxflags != null) {
++ result.put("SPACK_CXXFLAGS", spack_cxxflags);
++ }
++
++ String spack_fcflags = System.getenv("SPACK_FCFLAGS");
++ if (spack_fcflags != null) {
++ result.put("SPACK_FCFLAGS", spack_fcflags);
++ }
++
++ String spack_fflags = System.getenv("SPACK_FFLAGS");
++ if (spack_fflags != null) {
++ result.put("SPACK_FFLAGS", spack_fflags);
++ }
++
++ String spack_ldflags = System.getenv("SPACK_LDFLAGS");
++ if (spack_ldflags != null) {
++ result.put("SPACK_LDFLAGS", spack_ldflags);
++ }
++
++ String spack_ldlibs = System.getenv("SPACK_LDLIBS");
++ if (spack_ldlibs != null) {
++ result.put("SPACK_LDLIBS", spack_ldlibs);
++ }
++
++ String spack_debug = System.getenv("SPACK_DEBUG");
++ if (spack_debug != null) {
++ result.put("SPACK_DEBUG", spack_debug);
++ }
++
++ String spack_test_command = System.getenv("SPACK_TEST_COMMAND");
++ if (spack_test_command != null) {
++ result.put("SPACK_TEST_COMMAND", spack_test_command);
++ }
++
++ String spack_dependencies = System.getenv("SPACK_DEPENDENCIES");
++ if (spack_dependencies != null) {
++ result.put("SPACK_DEPENDENCIES", spack_dependencies);
++ }
++
+ return result.build();
+ }
+
diff --git a/var/spack/repos/builtin/packages/bazel/package.py b/var/spack/repos/builtin/packages/bazel/package.py
new file mode 100644
index 0000000000..94ac73cbd2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bazel/package.py
@@ -0,0 +1,89 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from multiprocessing import cpu_count
+from spack.util.environment import env_flag
+from spack.build_environment import SPACK_NO_PARALLEL_MAKE
+
+
+class Bazel(Package):
+ """Bazel is Google's own build tool"""
+
+ homepage = "https://www.bazel.io"
+ url = "https://github.com/bazelbuild/bazel/archive/0.3.1.tar.gz"
+
+ version('0.3.1', '5c959467484a7fc7dd2e5e4a1e8e866b')
+ version('0.3.0', '33a2cb457d28e1bee9282134769b9283')
+ version('0.2.3', '393a491d690e43caaba88005efe6da91')
+ version('0.2.2b', '75081804f073cbd194da1a07b16cba5f')
+ version('0.2.2', '644bc4ea7f429d835e74f255dc1054e6')
+
+ depends_on('jdk@8:')
+ patch('fix_env_handling.patch')
+ patch('link.patch')
+ patch('cc_configure.patch')
+
+ def install(self, spec, prefix):
+ bash = which('bash')
+ bash('-c', './compile.sh')
+ mkdir(prefix.bin)
+ install('output/bazel', prefix.bin)
+
+ def setup_dependent_package(self, module, dep_spec):
+ class BazelExecutable(Executable):
+ """Special callable executable object for bazel so the user can
+ specify parallel or not on a per-invocation basis. Using
+ 'parallel' as a kwarg will override whatever the package's
+ global setting is, so you can either default to true or false
+ and override particular calls.
+
+ Note that if the SPACK_NO_PARALLEL_MAKE env var is set it
+ overrides everything.
+ """
+
+ def __init__(self, name, command, jobs):
+ super(BazelExecutable, self).__init__(name)
+ self.bazel_command = command
+ self.jobs = jobs
+
+ def __call__(self, *args, **kwargs):
+ disable = env_flag(SPACK_NO_PARALLEL_MAKE)
+ parallel = ((not disable) and
+ kwargs.get('parallel', self.jobs > 1))
+
+ jobs = "--jobs=1"
+ if parallel:
+ jobs = "--jobs=%d" % self.jobs
+
+ args = (self.bazel_command,) + (jobs,) + args
+
+ return super(BazelExecutable, self).__call__(*args, **kwargs)
+
+ jobs = cpu_count()
+ if not dep_spec.package.parallel:
+ jobs = 1
+ elif dep_spec.package.make_jobs:
+ jobs = dep_spec.package.make_jobs
+ module.bazel = BazelExecutable('bazel', 'build', jobs)
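
With the wrapper above, dependents get a module-level bazel callable whose parallelism can be overridden per invocation, as the docstring describes. A minimal sketch of a dependent's install(); the target label is hypothetical:

    def install(self, spec, prefix):
        # 'bazel' is injected by setup_dependent_package() above and already
        # carries the 'build' subcommand plus an appropriate --jobs flag.
        bazel('//:hypothetical_target')                  # parallel, --jobs=<cpu_count>
        bazel('//:hypothetical_target', parallel=False)  # forces --jobs=1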
diff --git a/var/spack/repos/builtin/packages/bbcp/package.py b/var/spack/repos/builtin/packages/bbcp/package.py
index 09e897f34e..5d5e64a390 100644
--- a/var/spack/repos/builtin/packages/bbcp/package.py
+++ b/var/spack/repos/builtin/packages/bbcp/package.py
@@ -24,18 +24,25 @@
##############################################################################
from spack import *
+
class Bbcp(Package):
"""Securely and quickly copy data from source to target"""
homepage = "http://www.slac.stanford.edu/~abh/bbcp/"
- version('git', git='http://www.slac.stanford.edu/~abh/bbcp/bbcp.git', branch="master")
+ version('git', git='http://www.slac.stanford.edu/~abh/bbcp/bbcp.git',
+ branch="master")
+
+ depends_on('zlib')
+ depends_on('openssl')
def install(self, spec, prefix):
cd("src")
make()
- # BBCP wants to build the executable in a directory whose name depends on the system type
+ # BBCP wants to build the executable in a directory whose name depends
+ # on the system type
makesname = Executable("../MakeSname")
- bbcp_executable_path = "../bin/%s/bbcp" % makesname(output=str).rstrip("\n")
+ bbcp_executable_path = "../bin/%s/bbcp" % makesname(
+ output=str).rstrip("\n")
destination_path = "%s/bin/" % prefix
mkdirp(destination_path)
install(bbcp_executable_path, destination_path)
diff --git a/var/spack/repos/builtin/packages/bcftools/package.py b/var/spack/repos/builtin/packages/bcftools/package.py
new file mode 100644
index 0000000000..a1b4a06dbb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bcftools/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bcftools(Package):
+ """BCFtools is a set of utilities that manipulate variant calls in the
+ Variant Call Format (VCF) and its binary counterpart BCF. All
+ commands work transparently with both VCFs and BCFs, both
+ uncompressed and BGZF-compressed."""
+
+ homepage = "http://samtools.github.io/bcftools/"
+ url = "https://github.com/samtools/bcftools/releases/download/1.3.1/bcftools-1.3.1.tar.bz2"
+
+ version('1.3.1', '575001e9fca37cab0c7a7287ad4b1cdb')
+
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ make("prefix=%s" % prefix, "all")
+ make("prefix=%s" % prefix, "install")
diff --git a/var/spack/repos/builtin/packages/bdftopcf/package.py b/var/spack/repos/builtin/packages/bdftopcf/package.py
new file mode 100644
index 0000000000..095f0c1bd4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bdftopcf/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bdftopcf(Package):
+ """bdftopcf is a font compiler for the X server and font server. Fonts
+ in Portable Compiled Format can be read by any architecture, although
+ the file is structured to allow one particular architecture to read
+ them directly without reformatting. This allows fast reading on the
+ appropriate machine, but the files are still portable (but read more
+ slowly) on other machines."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/bdftopcf"
+ url = "https://www.x.org/archive/individual/app/bdftopcf-1.0.5.tar.gz"
+
+ version('1.0.5', '456416d33e0d41a96b5a3725d99e1be3')
+
+ depends_on('libxfont')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bdw-gc/package.py b/var/spack/repos/builtin/packages/bdw-gc/package.py
new file mode 100644
index 0000000000..5120266319
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bdw-gc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class BdwGc(Package):
+ """The Boehm-Demers-Weiser conservative garbage collector is a garbage
+ collecting replacement for C malloc or C++ new."""
+
+ homepage = "http://www.hboehm.info/gc/"
+ url = "http://www.hboehm.info/gc/gc_source/gc-7.4.4.tar.gz"
+
+ version('7.4.4', '96d18b0448a841c88d56e4ab3d180297')
+
+ variant('libatomic-ops', default=True,
+ description='Use external libatomic-ops')
+
+ depends_on('libatomic-ops', when='+libatomic-ops')
+
+ def install(self, spec, prefix):
+ config_args = [
+ '--prefix={0}'.format(prefix),
+ '--with-libatomic-ops={0}'.format(
+ 'yes' if '+libatomic-ops' in spec else 'no')
+ ]
+
+ configure(*config_args)
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bear/package.py b/var/spack/repos/builtin/packages/bear/package.py
index 730a684ec5..f52050d7b9 100644
--- a/var/spack/repos/builtin/packages/bear/package.py
+++ b/var/spack/repos/builtin/packages/bear/package.py
@@ -24,14 +24,16 @@
##############################################################################
from spack import *
+
class Bear(Package):
- """Bear is a tool that generates a compilation database for clang tooling from non-cmake build systems."""
+ """Bear is a tool that generates a compilation database for clang tooling
+ from non-cmake build systems."""
homepage = "https://github.com/rizsotto/Bear"
url = "https://github.com/rizsotto/Bear/archive/2.0.4.tar.gz"
version('2.0.4', 'fd8afb5e8e18f8737ba06f90bd77d011')
- depends_on("cmake")
+ depends_on('cmake', type='build')
depends_on("python")
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/bedtools2/package.py b/var/spack/repos/builtin/packages/bedtools2/package.py
new file mode 100644
index 0000000000..46f3185154
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bedtools2/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bedtools2(Package):
+ """Collectively, the bedtools utilities are a swiss-army knife of
+ tools for a wide-range of genomics analysis tasks. The most
+ widely-used tools enable genome arithmetic: that is, set theory
+ on the genome."""
+
+ homepage = "https://github.com/arq5x/bedtools2"
+ url = "https://github.com/arq5x/bedtools2/archive/v2.26.0.tar.gz"
+
+ version('2.26.0', '52227e7efa6627f0f95d7d734973233d')
+ version('2.25.0', '534fb4a7bf0d0c3f05be52a0160d8e3d')
+
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ make("prefix=%s" % prefix, "install")
diff --git a/var/spack/repos/builtin/packages/beforelight/package.py b/var/spack/repos/builtin/packages/beforelight/package.py
new file mode 100644
index 0000000000..37a91f5614
--- /dev/null
+++ b/var/spack/repos/builtin/packages/beforelight/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Beforelight(Package):
+ """The beforelight program is a sample implementation of a screen saver
+ for X servers supporting the MIT-SCREEN-SAVER extension. It is only
+ recommended for use as a code sample, as it does not include features
+ such as screen locking or configurability."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/beforelight"
+ url = "https://www.x.org/archive/individual/app/beforelight-1.0.5.tar.gz"
+
+ version('1.0.5', 'f0433eb6df647f36bbb5b38fb2beb22a')
+
+ depends_on('libx11')
+ depends_on('libxscrnsaver')
+ depends_on('libxt')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bertini/package.py b/var/spack/repos/builtin/packages/bertini/package.py
new file mode 100644
index 0000000000..7dd17a062e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bertini/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bertini(Package):
+ """Bertini is a general-purpose solver, written in C, that was created
+ for research about polynomial continuation. It solves for the numerical
+ solution of systems of polynomial equations using homotopy continuation."""
+
+ homepage = "https://bertini.nd.edu/"
+ url = "https://bertini.nd.edu/BertiniSource_v1.5.tar.gz"
+
+ version('1.5', 'e3f6cc6e7f9a0cf1d73185e8671af707')
+
+ variant('mpi', default=True, description='Compile in parallel')
+
+ depends_on('flex', type='build')
+ depends_on('bison', type='build')
+ depends_on('gmp')
+ depends_on('mpfr')
+ depends_on('mpi', when='+mpi')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/bib2xhtml/package.py b/var/spack/repos/builtin/packages/bib2xhtml/package.py
index a9cbd204b6..b356038180 100644
--- a/var/spack/repos/builtin/packages/bib2xhtml/package.py
+++ b/var/spack/repos/builtin/packages/bib2xhtml/package.py
@@ -25,10 +25,11 @@
from spack import *
from glob import glob
+
class Bib2xhtml(Package):
"""bib2xhtml is a program that converts BibTeX files into HTML."""
homepage = "http://www.spinellis.gr/sw/textproc/bib2xhtml/"
- url='http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz'
+ url = 'http://www.spinellis.gr/sw/textproc/bib2xhtml/bib2xhtml-v3.0-15-gf506.tar.gz'
version('3.0-15-gf506', 'a26ba02fe0053bbbf2277bdf0acf8645')
diff --git a/var/spack/repos/builtin/packages/bigreqsproto/package.py b/var/spack/repos/builtin/packages/bigreqsproto/package.py
new file mode 100644
index 0000000000..61fd9c5121
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bigreqsproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bigreqsproto(Package):
+ """Big Requests Extension.
+
+ This extension defines a protocol to enable the use of requests
+ that exceed 262140 bytes in length."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/bigreqsproto"
+ url = "https://www.x.org/archive/individual/proto/bigreqsproto-1.1.2.tar.gz"
+
+ version('1.1.2', '9b83369ac7a5eb2bf54c8f34db043a0e')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
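
The type='build' markers on pkg-config and util-macros mean those tools are needed only while bigreqsproto is being built; they are not linked into the result or kept on its run-time path. The fragment below (lines as they would appear inside a package class) contrasts the dependency types used throughout this change:

    # Build-only tool: present during the build, not linked or run later.
    depends_on('pkg-config@0.9.0:', type='build')
    # Default type ('build', 'link'): the dependency is also linked against.
    depends_on('libx11')
    # Needed both to build and to run, as the py-* deps in cantera below use.
    depends_on('py-numpy', type=('build', 'run'))
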
diff --git a/var/spack/repos/builtin/packages/binutils/package.py b/var/spack/repos/builtin/packages/binutils/package.py
index 9e4cc98ae6..bf49ca9405 100644
--- a/var/spack/repos/builtin/packages/binutils/package.py
+++ b/var/spack/repos/builtin/packages/binutils/package.py
@@ -24,28 +24,33 @@
##############################################################################
from spack import *
+
class Binutils(Package):
"""GNU binutils, which contain the linker, assembler, objdump and others"""
- homepage = "http://www.gnu.org/software/binutils/"
- url="https://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2"
+ homepage = "http://www.gnu.org/software/binutils/"
+ url = "https://ftp.gnu.org/gnu/binutils/binutils-2.25.tar.bz2"
+ version('2.27', '2869c9bf3e60ee97c74ac2a6bf4e9d68')
version('2.26', '64146a0faa3b411ba774f47d41de239f')
version('2.25', 'd9f3303f802a5b6b0bb73a335ab89d66')
version('2.24', 'e0f71a7b2ddab0f8612336ac81d9636b')
version('2.23.2', '4f8fa651e35ef262edc01d60fb45702e')
version('2.20.1', '2b9dc8f2b7dbd5ec5992c6e29de0b764')
- depends_on('m4')
- depends_on('flex')
- depends_on('bison')
+ depends_on('m4', type='build')
+ depends_on('flex', type='build')
+ depends_on('bison', type='build')
- # Add a patch that creates binutils libiberty_pic.a which is preferred by OpenSpeedShop and cbtf-krell
- variant('krellpatch', default=False, description="build with openspeedshop based patch.")
+ # Add a patch that creates binutils libiberty_pic.a which is preferred by
+ # OpenSpeedShop and cbtf-krell
+ variant('krellpatch', default=False,
+ description="build with openspeedshop based patch.")
variant('gold', default=True, description="build the gold linker")
patch('binutilskrell-2.24.patch', when='@2.24+krellpatch')
patch('cr16.patch')
+ patch('update_symbol-2.26.patch', when='@2.26')
variant('libiberty', default=False, description='Also install libiberty.')
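
The constraint strings passed to patch() here ('@2.24+krellpatch', '@2.26') use the same spec syntax that spec.satisfies() and '+variant in spec' accept at build time, so the binutils variants can also steer configure options. The sketch below is not taken from this diff (the package's install() sits outside this excerpt), and the flag names reflect our reading of binutils' own configure script:

    # Hedged fragment meant to sit inside class Binutils.
    def install(self, spec, prefix):
        options = ['--prefix=%s' % prefix]
        if '+gold' in spec:
            options.append('--enable-gold')
        if '+libiberty' in spec:
            options.append('--enable-install-libiberty')
        configure(*options)
        make()
        make('install')
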
diff --git a/var/spack/repos/builtin/packages/binutils/update_symbol-2.26.patch b/var/spack/repos/builtin/packages/binutils/update_symbol-2.26.patch
new file mode 100644
index 0000000000..2601f63a6b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/binutils/update_symbol-2.26.patch
@@ -0,0 +1,104 @@
+From 544ddf9322b1b83982e5cb84a54d084ee7e718ea Mon Sep 17 00:00:00 2001
+From: H.J. Lu <hjl.tools@gmail.com>
+Date: Wed, 24 Feb 2016 15:13:35 -0800
+Subject: [PATCH] Update symbol version for symbol from linker script
+
+We need to update symbol version for symbols from linker script.
+
+Backport from master
+
+bfd/
+
+ PR ld/19698
+ * elflink.c (bfd_elf_record_link_assignment): Set versioned if
+ symbol version is unknown.
+
+ld/
+
+ PR ld/19698
+ * testsuite/ld-elf/pr19698.d: New file.
+ * testsuite/ld-elf/pr19698.s: Likewise.
+ * testsuite/ld-elf/pr19698.t: Likewise.
+---
+ bfd/ChangeLog | 9 +++++++++
+ bfd/elflink.c | 13 +++++++++++++
+ ld/ChangeLog | 10 ++++++++++
+ ld/testsuite/ld-elf/pr19698.d | 10 ++++++++++
+ ld/testsuite/ld-elf/pr19698.s | 5 +++++
+ ld/testsuite/ld-elf/pr19698.t | 11 +++++++++++
+ 6 files changed, 58 insertions(+), 0 deletions(-)
+ create mode 100644 ld/testsuite/ld-elf/pr19698.d
+ create mode 100644 ld/testsuite/ld-elf/pr19698.s
+ create mode 100644 ld/testsuite/ld-elf/pr19698.t
+
+diff --git a/bfd/elflink.c b/bfd/elflink.c
+index ae8d148..8fcaadd 100644
+--- a/bfd/elflink.c
++++ b/bfd/elflink.c
+@@ -555,6 +555,19 @@ bfd_elf_record_link_assignment (bfd *output_bfd,
+ if (h == NULL)
+ return provide;
+
++ if (h->versioned == unknown)
++ {
++ /* Set versioned if symbol version is unknown. */
++ char *version = strrchr (name, ELF_VER_CHR);
++ if (version)
++ {
++ if (version > name && version[-1] != ELF_VER_CHR)
++ h->versioned = versioned_hidden;
++ else
++ h->versioned = versioned;
++ }
++ }
++
+ switch (h->root.type)
+ {
+ case bfd_link_hash_defined:
+diff --git a/ld/testsuite/ld-elf/pr19698.d b/ld/testsuite/ld-elf/pr19698.d
+new file mode 100644
+index 0000000..a39f67a
+--- /dev/null
++++ b/ld/testsuite/ld-elf/pr19698.d
+@@ -0,0 +1,10 @@
++#ld: -shared $srcdir/$subdir/pr19698.t
++#readelf : --dyn-syms --wide
++#target: *-*-linux* *-*-gnu* *-*-solaris*
++
++Symbol table '\.dynsym' contains [0-9]+ entries:
++#...
++ +[0-9]+: +[0-9a-f]+ +[0-9a-f]+ +FUNC +GLOBAL +DEFAULT +[0-9]+ +foo@VERS.1
++#...
++ +[0-9]+: +[0-9a-f]+ +[0-9a-f]+ +FUNC +GLOBAL +DEFAULT +[0-9]+ +foo@@VERS.2
++#pass
+diff --git a/ld/testsuite/ld-elf/pr19698.s b/ld/testsuite/ld-elf/pr19698.s
+new file mode 100644
+index 0000000..875dca4
+--- /dev/null
++++ b/ld/testsuite/ld-elf/pr19698.s
+@@ -0,0 +1,5 @@
++ .text
++ .globl foo
++ .type foo, %function
++foo:
++ .byte 0
+diff --git a/ld/testsuite/ld-elf/pr19698.t b/ld/testsuite/ld-elf/pr19698.t
+new file mode 100644
+index 0000000..09d9125
+--- /dev/null
++++ b/ld/testsuite/ld-elf/pr19698.t
+@@ -0,0 +1,11 @@
++"foo@VERS.1" = foo;
++
++VERSION {
++VERS.2 {
++ global:
++ foo;
++};
++
++VERS.1 {
++};
++}
+--
+1.7.1
+
diff --git a/var/spack/repos/builtin/packages/bison/package.py b/var/spack/repos/builtin/packages/bison/package.py
index c5bc051c80..70795f05cc 100644
--- a/var/spack/repos/builtin/packages/bison/package.py
+++ b/var/spack/repos/builtin/packages/bison/package.py
@@ -24,9 +24,10 @@
##############################################################################
from spack import *
+
class Bison(Package):
- """Bison is a general-purpose parser generator that converts
- an annotated context-free grammar into a deterministic LR or
+ """Bison is a general-purpose parser generator that converts
+ an annotated context-free grammar into a deterministic LR or
generalized LR (GLR) parser employing LALR(1) parser tables."""
homepage = "http://www.gnu.org/software/bison/"
@@ -34,7 +35,7 @@ class Bison(Package):
version('3.0.4', 'a586e11cd4aff49c3ff6d3b6a4c9ccf8')
- depends_on("m4")
+ depends_on("m4", type='build')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/bitmap/package.py b/var/spack/repos/builtin/packages/bitmap/package.py
new file mode 100644
index 0000000000..55fdacefd5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bitmap/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bitmap(Package):
+ """bitmap, bmtoa, atobm - X bitmap (XBM) editor and converter utilities."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/bitmap"
+ url = "https://www.x.org/archive/individual/app/bitmap-1.0.8.tar.gz"
+
+ version('1.0.8', '0ca600041bb0836ae7c9f5db5ce09091')
+
+ depends_on('libx11')
+ depends_on('libxmu')
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt')
+
+ depends_on('xbitmaps', type='build')
+ depends_on('xproto@7.0.25:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bliss/Makefile.spack.patch b/var/spack/repos/builtin/packages/bliss/Makefile.spack.patch
new file mode 100644
index 0000000000..4f4441bbe9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bliss/Makefile.spack.patch
@@ -0,0 +1,62 @@
+--- old/Makefile.spack
++++ new/Makefile.spack
+@@ -0,0 +1,59 @@
++# Set PREFIX to the install location for both building and installing
++# Set GMP_PREFIX to the location where GMP is installed
++
++SRCS = \
++ bliss_C.cc \
++ defs.cc \
++ graph.cc \
++ heap.cc \
++ orbit.cc \
++ partition.cc \
++ timer.cc \
++ uintseqhash.cc \
++ utils.cc
++
++all: libbliss.la bliss libbliss_gmp.la bliss_gmp
++
++libbliss.la: $(SRCS:%.cc=%.lo)
++ libtool --mode=link --tag=CXX c++ -g -O3 \
++ -rpath $(PREFIX)/lib -o $@ $^
++libbliss_gmp.la: $(SRCS:%.cc=%.gmp.lo)
++ libtool --mode=link --tag=CXX c++ -g -O3 \
++ -rpath $(PREFIX)/lib -o $@ $^ -L$(GMP_PREFIX)/lib -lgmp
++
++bliss: bliss.lo libbliss.la
++ libtool --mode=link --tag=CXX c++ -g -O3 -o $@ $^
++
++bliss_gmp: bliss.gmp.lo libbliss_gmp.la
++ libtool --mode=link --tag=CXX c++ -g -O3 -o $@ $^
++
++%.lo: %.cc
++ libtool --mode=compile --tag=CXX c++ -g -O3 -o $@ -c $*.cc
++%.gmp.lo: %.cc
++ libtool --mode=compile --tag=CXX c++ -g -O3 -o $@ \
++ -c -DBLISS_USE_GMP $*.cc
++
++install:
++ mkdir -p $(PREFIX)/bin
++ mkdir -p $(PREFIX)/include/bliss
++ mkdir -p $(PREFIX)/lib
++ libtool --mode=install cp bliss $(PREFIX)/bin/bliss
++ libtool --mode=install cp bliss_gmp $(PREFIX)/bin/bliss_gmp
++ libtool --mode=install cp bignum.hh $(PREFIX)/include/bliss/bignum.hh
++ libtool --mode=install cp bliss_C.h $(PREFIX)/include/bliss/bliss_C.h
++ libtool --mode=install cp defs.hh $(PREFIX)/include/bliss/defs.hh
++ libtool --mode=install cp graph.hh $(PREFIX)/include/bliss/graph.hh
++ libtool --mode=install cp heap.hh $(PREFIX)/include/bliss/heap.hh
++ libtool --mode=install cp kqueue.hh $(PREFIX)/include/bliss/kqueue.hh
++ libtool --mode=install cp kstack.hh $(PREFIX)/include/bliss/kstack.hh
++ libtool --mode=install cp orbit.hh $(PREFIX)/include/bliss/orbit.hh
++ libtool --mode=install cp partition.hh \
++ $(PREFIX)/include/bliss/partition.hh
++ libtool --mode=install cp timer.hh $(PREFIX)/include/bliss/timer.hh
++ libtool --mode=install cp uintseqhash.hh \
++ $(PREFIX)/include/bliss/uintseqhash.hh
++ libtool --mode=install cp utils.hh $(PREFIX)/include/bliss/utils.hh
++ libtool --mode=install cp libbliss.la $(PREFIX)/lib/libbliss.la
++ libtool --mode=install cp libbliss_gmp.la $(PREFIX)/lib/libbliss_gmp.la
++
++.PHONY: all install
diff --git a/var/spack/repos/builtin/packages/bliss/package.py b/var/spack/repos/builtin/packages/bliss/package.py
new file mode 100644
index 0000000000..a81a806807
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bliss/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Bliss(Package):
+ """bliss: A Tool for Computing Automorphism Groups and Canonical
+ Labelings of Graphs"""
+
+ homepage = "http://www.tcs.hut.fi/Software/bliss/"
+ url = "http://www.tcs.hut.fi/Software/bliss/bliss-0.73.zip"
+
+ version('0.73', '72f2e310786923b5c398ba0fc40b42ce')
+
+ # Note: Bliss can also be built without gmp, but we don't support this yet
+
+ depends_on("gmp")
+ depends_on("libtool", type='build')
+
+ patch("Makefile.spack.patch")
+
+ def install(self, spec, prefix):
+ # The Makefile isn't portable; use our own instead
+ makeargs = ["-f", "Makefile.spack",
+ "PREFIX=%s" % prefix, "GMP_PREFIX=%s" % spec["gmp"].prefix]
+ make(*makeargs)
+ make("install", *makeargs)
diff --git a/var/spack/repos/builtin/packages/blitz/package.py b/var/spack/repos/builtin/packages/blitz/package.py
index acc6ddcd07..d6fd31d637 100644
--- a/var/spack/repos/builtin/packages/blitz/package.py
+++ b/var/spack/repos/builtin/packages/blitz/package.py
@@ -24,16 +24,16 @@
##############################################################################
from spack import *
-class Blitz(Package):
+
+class Blitz(AutotoolsPackage):
"""N-dimensional arrays for C++"""
homepage = "http://github.com/blitzpp/blitz"
- url = "https://github.com/blitzpp/blitz/tarball/1.0.0"
+ url = "https://github.com/blitzpp/blitz/tarball/1.0.0"
version('1.0.0', '9f040b9827fe22228a892603671a77af')
- # No dependencies
+ build_targets = ['lib']
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
- make()
- make("install")
+ def check(self):
+ make('check-testsuite')
+ make('check-examples')
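
For blitz the conversion to AutotoolsPackage replaces the deleted install() with inherited phases: build_targets = ['lib'] tells the build phase which make target to run, and check() is invoked only when tests are requested at install time. Roughly, the base class now does the equivalent of the following (written out for reference only, not part of the recipe):

    def configure(self, spec, prefix):
        configure('--prefix=%s' % prefix)

    def build(self, spec, prefix):
        make('lib')              # taken from build_targets = ['lib']

    def install(self, spec, prefix):
        make('install')          # the default 'install' target
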
diff --git a/var/spack/repos/builtin/packages/boost/boost_10125.patch b/var/spack/repos/builtin/packages/boost/boost_10125.patch
new file mode 100644
index 0000000000..71cfaa871e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/boost/boost_10125.patch
@@ -0,0 +1,51 @@
+--- a/boost/thread/pthread/once.hpp
++++ b/boost/thread/pthread/once.hpp
+@@ -42,7 +42,7 @@ namespace boost
+ }
+
+ #ifdef BOOST_THREAD_PROVIDES_ONCE_CXX11
+-#ifndef BOOST_NO_CXX11_VARIADIC_TEMPLATES
++#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES) && !defined(BOOST_NO_CXX11_RVALUE_REFERENCES)
+ template<typename Function, class ...ArgTypes>
+ inline void call_once(once_flag& flag, BOOST_THREAD_RV_REF(Function) f, BOOST_THREAD_RV_REF(ArgTypes)... args);
+ #else
+@@ -65,7 +65,7 @@ namespace boost
+ private:
+ volatile thread_detail::uintmax_atomic_t epoch;
+
+-#ifndef BOOST_NO_CXX11_VARIADIC_TEMPLATES
++#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES) && !defined(BOOST_NO_CXX11_RVALUE_REFERENCES)
+ template<typename Function, class ...ArgTypes>
+ friend void call_once(once_flag& flag, BOOST_THREAD_RV_REF(Function) f, BOOST_THREAD_RV_REF(ArgTypes)... args);
+ #else
+@@ -118,7 +118,7 @@ namespace boost
+ // http://www.open-std.org/jtc1/sc22/wg21/docs/papers/2007/n2444.html
+
+
+-#ifndef BOOST_NO_CXX11_VARIADIC_TEMPLATES
++#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES) && !defined(BOOST_NO_CXX11_RVALUE_REFERENCES)
+
+
+ template<typename Function, class ...ArgTypes>
+--- a/boost/thread/pthread/once_atomic.hpp
++++ b/boost/thread/pthread/once_atomic.hpp
+@@ -115,7 +115,7 @@ namespace boost
+ #endif
+
+
+-#ifndef BOOST_NO_CXX11_VARIADIC_TEMPLATES
++#if !defined(BOOST_NO_CXX11_VARIADIC_TEMPLATES) && !defined(BOOST_NO_CXX11_RVALUE_REFERENCES)
+
+ template<typename Function, class ...ArgTypes>
+ inline void call_once(once_flag& flag, BOOST_THREAD_RV_REF(Function) f, BOOST_THREAD_RV_REF(ArgTypes)... args)
+--- a/boost/thread/win32/once.hpp
++++ b/boost/thread/win32/once.hpp
+@@ -227,7 +227,7 @@ namespace boost
+ }
+ }
+
+-#ifndef BOOST_NO_CXX11_VARIADIC_TEMPLATES
++#if !defined BOOST_NO_CXX11_VARIADIC_TEMPLATES && !defined(BOOST_NO_CXX11_RVALUE_REFERENCES)
+ //#if defined(BOOST_THREAD_RVALUE_REFERENCES_DONT_MATCH_FUNTION_PTR)
+ inline void call_once(once_flag& flag, void (*f)())
+ {
diff --git a/var/spack/repos/builtin/packages/boost/package.py b/var/spack/repos/builtin/packages/boost/package.py
index b1b9c58b32..738e8a0e35 100644
--- a/var/spack/repos/builtin/packages/boost/package.py
+++ b/var/spack/repos/builtin/packages/boost/package.py
@@ -23,11 +23,10 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import spack
import sys
-
import os
-import sys
+from glob import glob
+
class Boost(Package):
"""Boost provides free peer-reviewed portable C++ source
@@ -43,6 +42,9 @@ class Boost(Package):
list_url = "http://sourceforge.net/projects/boost/files/boost/"
list_depth = 2
+ version('1.63.0', '1c837ecd990bb022d07e7aab32b09847')
+ version('1.62.0', '5fb94629535c19e48703bdb2b2e9490f')
+ version('1.61.0', '6095876341956f65f9d35939ccea1a9f')
version('1.60.0', '65a840e1a0b13a558ff19eeb2c4f0cbe')
version('1.59.0', '6aa9a5c6a4ca1016edd0ed1178e3cb87')
version('1.58.0', 'b8839650e61e9c1c0a89f371dd475546')
@@ -74,23 +76,24 @@ class Boost(Package):
version('1.34.0', 'ed5b9291ffad776f8757a916e1726ad0')
default_install_libs = set(['atomic',
- 'chrono',
- 'date_time',
- 'filesystem',
- 'graph',
- 'iostreams',
- 'locale',
- 'log',
- 'math',
- 'program_options',
- 'random',
- 'regex',
- 'serialization',
- 'signals',
- 'system',
- 'test',
- 'thread',
- 'wave'])
+ 'chrono',
+ 'date_time',
+ 'filesystem',
+ 'graph',
+ 'iostreams',
+ 'locale',
+ 'log',
+ 'math',
+ 'program_options',
+ 'random',
+ 'regex',
+ 'serialization',
+ 'signals',
+ 'system',
+ 'test',
+ 'thread',
+ 'timer',
+ 'wave'])
# mpi/python are not installed by default because they pull in many
# dependencies and/or because there is a great deal of customization
@@ -101,15 +104,24 @@ class Boost(Package):
for lib in all_libs:
variant(lib, default=(lib not in default_noinstall_libs),
- description="Compile with {0} library".format(lib))
-
- variant('debug', default=False, description='Switch to the debug version of Boost')
- variant('shared', default=True, description="Additionally build shared libraries")
- variant('multithreaded', default=True, description="Build multi-threaded versions of libraries")
- variant('singlethreaded', default=True, description="Build single-threaded versions of libraries")
- variant('icu_support', default=False, description="Include ICU support (for regex/locale libraries)")
-
- depends_on('icu', when='+icu_support')
+ description="Compile with {0} library".format(lib))
+
+ variant('debug', default=False,
+ description='Switch to the debug version of Boost')
+ variant('shared', default=True,
+ description="Additionally build shared libraries")
+ variant('multithreaded', default=True,
+ description="Build multi-threaded versions of libraries")
+ variant('singlethreaded', default=False,
+ description="Build single-threaded versions of libraries")
+ variant('icu', default=False,
+ description="Build with Unicode and ICU suport")
+ variant('graph', default=False,
+ description="Build the Boost Graph library")
+ variant('taggedlayout', default=False,
+ description="Augment library names with build options")
+
+ depends_on('icu4c', when='+icu')
depends_on('python', when='+python')
depends_on('mpi', when='+mpi')
depends_on('bzip2', when='+iostreams')
@@ -118,22 +130,29 @@ class Boost(Package):
# Patch fix from https://svn.boost.org/trac/boost/ticket/11856
patch('boost_11856.patch', when='@1.60.0%gcc@4.4.7')
+ # Patch fix from https://svn.boost.org/trac/boost/ticket/10125
+ patch('boost_10125.patch', when='@1.55.0%gcc@5.0:5.9')
+
def url_for_version(self, version):
- """Handle Boost's weird URLs, which write the version two different ways."""
+ """
+ Handle Boost's weird URLs,
+ which write the version two different ways.
+ """
parts = [str(p) for p in Version(version)]
dots = ".".join(parts)
underscores = "_".join(parts)
- return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (
- dots, underscores)
+ return "http://downloads.sourceforge.net/project/boost/boost/%s/boost_%s.tar.bz2" % (dots, underscores)
def determine_toolset(self, spec):
- if spec.satisfies("=darwin-x86_64"):
+ if spec.satisfies("platform=darwin"):
return 'darwin'
toolsets = {'g++': 'gcc',
'icpc': 'intel',
'clang++': 'clang'}
+ if spec.satisfies('@1.47:'):
+ toolsets['icpc'] += '-linux'
for cc, toolset in toolsets.iteritems():
if cc in self.compiler.cxx_names:
return toolset
@@ -141,6 +160,26 @@ class Boost(Package):
# fallback to gcc if no toolset found
return 'gcc'
+ def bjam_python_line(self, spec):
+ from os.path import dirname, splitext
+ pydir = 'python%s.%s*' % spec['python'].version.version[:2]
+ incs = join_path(spec['python'].prefix.include, pydir, "pyconfig.h")
+ incs = glob(incs)
+ incs = " ".join([dirname(u) for u in incs])
+
+ pylib = 'libpython%s.%s*' % spec['python'].version.version[:2]
+ all_libs = join_path(spec['python'].prefix.lib, pylib)
+ libs = [u for u in all_libs if splitext(u)[1] == dso_suffix]
+ if len(libs) == 0:
+ libs = [u for u in all_libs if splitext(u)[1] == '.a']
+
+ libs = " ".join(libs)
+ return 'using python : %s : %s : %s : %s ;\n' % (
+ spec['python'].version.up_to(2),
+ join_path(spec['python'].prefix.bin, 'python'),
+ incs, libs
+ )
+
def determine_bootstrap_options(self, spec, withLibs, options):
boostToolsetId = self.determine_toolset(spec)
options.append('--with-toolset=%s' % boostToolsetId)
@@ -148,20 +187,25 @@ class Boost(Package):
if '+python' in spec:
options.append('--with-python=%s' %
- join_path(spec['python'].prefix.bin, 'python'))
+ join_path(spec['python'].prefix.bin, 'python'))
with open('user-config.jam', 'w') as f:
- compiler_wrapper = join_path(spack.build_env_path, 'c++')
- f.write("using {0} : : {1} ;\n".format(boostToolsetId,
- compiler_wrapper))
+ # Boost may end up using gcc even though clang+gfortran is set in
+ # compilers.yaml. Make sure this does not happen:
+ if not spec.satisfies('%intel'):
+ # using intel-linux : : spack_cxx in user-config.jam leads to
+ # error: at project-config.jam:12
+ # error: duplicate initialization of intel-linux with the following parameters: # noqa
+ # error: version = <unspecified>
+ # error: previous initialization at ./user-config.jam:1
+ f.write("using {0} : : {1} ;\n".format(boostToolsetId,
+ spack_cxx))
if '+mpi' in spec:
f.write('using mpi : %s ;\n' %
- join_path(spec['mpi'].prefix.bin, 'mpicxx'))
+ join_path(spec['mpi'].prefix.bin, 'mpicxx'))
if '+python' in spec:
- f.write('using python : %s : %s ;\n' %
- (spec['python'].version,
- join_path(spec['python'].prefix.bin, 'python')))
+ f.write(self.bjam_python_line(spec))
def determine_b2_options(self, spec, options):
if '+debug' in spec:
@@ -177,8 +221,7 @@ class Boost(Package):
'-s', 'BZIP2_INCLUDE=%s' % spec['bzip2'].prefix.include,
'-s', 'BZIP2_LIBPATH=%s' % spec['bzip2'].prefix.lib,
'-s', 'ZLIB_INCLUDE=%s' % spec['zlib'].prefix.include,
- '-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib,
- ])
+ '-s', 'ZLIB_LIBPATH=%s' % spec['zlib'].prefix.lib])
linkTypes = ['static']
if '+shared' in spec:
@@ -190,20 +233,47 @@ class Boost(Package):
if '+singlethreaded' in spec:
threadingOpts.append('single')
if not threadingOpts:
- raise RuntimeError("At least one of {singlethreaded, multithreaded} must be enabled")
+ raise RuntimeError("At least one of {singlethreaded, " +
+ "multithreaded} must be enabled")
+
+ if '+taggedlayout' in spec:
+ layout = 'tagged'
+ else:
+ if len(threadingOpts) > 1:
+ raise RuntimeError("Cannot build both single and " +
+ "multi-threaded targets with system layout")
+ layout = 'system'
options.extend([
- 'toolset=%s' % self.determine_toolset(spec),
'link=%s' % ','.join(linkTypes),
- '--layout=tagged'])
+ '--layout=%s' % layout
+ ])
+
+ if not spec.satisfies('%intel'):
+ options.extend([
+ 'toolset=%s' % self.determine_toolset(spec)
+ ])
+
+ # clang is not officially supported for pre-compiled headers
+ # and at least in clang 3.9 still fails to build
+ # http://www.boost.org/build/doc/html/bbv2/reference/precompiled_headers.html
+ # https://svn.boost.org/trac/boost/ticket/12496
+ if spec.satisfies('%clang'):
+ options.extend(['pch=off'])
return threadingOpts
+ def add_buildopt_symlinks(self, prefix):
+ with working_dir(prefix.lib):
+ for lib in os.listdir(os.curdir):
+ prefix, remainder = lib.split('.', 1)
+ symlink(lib, '%s-mt.%s' % (prefix, remainder))
+
def install(self, spec, prefix):
# On Darwin, Boost expects the Darwin libtool. However, one of the
- # dependencies may have pulled in Spack's GNU libtool, and these two are
- # not compatible. We thus create a symlink to Darwin's libtool and add
- # it at the beginning of PATH.
+ # dependencies may have pulled in Spack's GNU libtool, and these two
+ # are not compatible. We thus create a symlink to Darwin's libtool
+ # and add it at the beginning of PATH.
if sys.platform == 'darwin':
newdir = os.path.abspath('darwin-libtool')
mkdirp(newdir)
@@ -216,7 +286,8 @@ class Boost(Package):
withLibs.append(lib)
if not withLibs:
# if no libraries are specified for compilation, then you dont have
- # to configure/build anything, just copy over to the prefix directory.
+ # to configure/build anything, just copy over to the prefix
+ # directory.
src = join_path(self.stage.source_path, 'boost')
mkdirp(join_path(prefix, 'include'))
dst = join_path(prefix, 'include', 'boost')
@@ -234,6 +305,9 @@ class Boost(Package):
withLibs.remove('chrono')
if not spec.satisfies('@1.43.0:'):
withLibs.remove('random')
+ if '+graph' in spec and '+mpi' in spec:
+ withLibs.remove('graph')
+ withLibs.append('graph_parallel')
# to make Boost find the user-config.jam
env['BOOST_BUILD_PATH'] = './'
@@ -253,11 +327,17 @@ class Boost(Package):
threadingOpts = self.determine_b2_options(spec, b2_options)
+ b2('--clean')
+
# In theory it could be done on one call but it fails on
# Boost.MPI if the threading options are not separated.
for threadingOpt in threadingOpts:
b2('install', 'threading=%s' % threadingOpt, *b2_options)
- # The shared libraries are not installed correctly on Darwin; correct this
+ if '+multithreaded' in spec and '~taggedlayout' in spec:
+ self.add_buildopt_symlinks(prefix)
+
+ # The shared libraries are not installed correctly
+ # on Darwin; correct this
if (sys.platform == 'darwin') and ('+shared' in spec):
fix_darwin_install_name(prefix.lib)
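
The new add_buildopt_symlinks() step compensates for the switch to --layout=system: a multithreaded-only build no longer carries the '-mt' name tag, so an alias is created for every installed library to keep consumers that expect tagged names linking. In isolation (the library name below is a placeholder):

    lib = 'libboost_thread.so.1.63.0'      # placeholder file name
    stem, rest = lib.split('.', 1)
    alias = '%s-mt.%s' % (stem, rest)
    print('%s -> %s' % (alias, lib))
    # prints: libboost_thread-mt.so.1.63.0 -> libboost_thread.so.1.63.0
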
diff --git a/var/spack/repos/builtin/packages/bowtie2/package.py b/var/spack/repos/builtin/packages/bowtie2/package.py
index 6d641da49b..a8a1a34ed4 100644
--- a/var/spack/repos/builtin/packages/bowtie2/package.py
+++ b/var/spack/repos/builtin/packages/bowtie2/package.py
@@ -24,12 +24,15 @@
##############################################################################
from spack import *
from glob import glob
+
+
class Bowtie2(Package):
"""Description"""
homepage = "bowtie-bio.sourceforge.net/bowtie2/index.shtml"
- version('2.2.5','51fa97a862d248d7ee660efc1147c75f', url = "http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip")
+ version('2.2.5', '51fa97a862d248d7ee660efc1147c75f',
+ url="http://downloads.sourceforge.net/project/bowtie-bio/bowtie2/2.2.5/bowtie2-2.2.5-source.zip")
- patch('bowtie2-2.5.patch',when='@2.2.5', level=0)
+ patch('bowtie2-2.5.patch', when='@2.2.5', level=0)
def install(self, spec, prefix):
make()
@@ -45,4 +48,3 @@ class Bowtie2(Package):
# install('bowtie2-inspect',prefix.bin)
# install('bowtie2-inspect-l',prefix.bin)
# install('bowtie2-inspect-s',prefix.bin)
-
diff --git a/var/spack/repos/builtin/packages/boxlib/package.py b/var/spack/repos/builtin/packages/boxlib/package.py
index ec7db6c08e..8d46014d3b 100644
--- a/var/spack/repos/builtin/packages/boxlib/package.py
+++ b/var/spack/repos/builtin/packages/boxlib/package.py
@@ -24,26 +24,29 @@
##############################################################################
from spack import *
-class Boxlib(Package):
+
+class Boxlib(CMakePackage):
"""BoxLib, a software framework for massively parallel
block-structured adaptive mesh refinement (AMR) codes."""
homepage = "https://ccse.lbl.gov/BoxLib/"
- url = "https://ccse.lbl.gov/pub/Downloads/BoxLib.git";
+ url = "https://github.com/BoxLib-Codes/BoxLib/archive/16.12.2.tar.gz"
- # TODO: figure out how best to version this. No tags in the repo!
- version('master', git='https://ccse.lbl.gov/pub/Downloads/BoxLib.git')
+ version('16.12.2', 'a28d92a5ff3fbbdbbd0a776a59f18526')
depends_on('mpi')
- def install(self, spec, prefix):
- args = std_cmake_args
- args += ['-DCCSE_ENABLE_MPI=1',
- '-DCMAKE_C_COMPILER=%s' % which('mpicc'),
- '-DCMAKE_CXX_COMPILER=%s' % which('mpicxx'),
- '-DCMAKE_Fortran_COMPILER=%s' % which('mpif90')]
+ def cmake_args(self):
+ spec = self.spec
+ options = []
- cmake('.', *args)
- make()
- make("install")
+ options.extend([
+ # '-DBL_SPACEDIM=3',
+ '-DENABLE_POSITION_INDEPENDENT_CODE=ON',
+ '-DENABLE_FBASELIB=ON',
+ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
+ '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
+ '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc
+ ])
+ return options
diff --git a/var/spack/repos/builtin/packages/bpp-core/package.py b/var/spack/repos/builtin/packages/bpp-core/package.py
new file mode 100644
index 0000000000..f716a2ee05
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bpp-core/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class BppCore(Package):
+ """Bio++ core library."""
+
+ homepage = "http://biopp.univ-montp2.fr/wiki/index.php/Installation"
+ url = "http://biopp.univ-montp2.fr/repos/sources/bpp-core-2.2.0.tar.gz"
+
+ version('2.2.0', '5789ed2ae8687d13664140cd77203477')
+
+ depends_on('cmake', type='build')
+
+ def install(self, spec, prefix):
+ cmake('-DBUILD_TESTING=FALSE', '.', *std_cmake_args)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bpp-phyl/package.py b/var/spack/repos/builtin/packages/bpp-phyl/package.py
new file mode 100644
index 0000000000..4ff77f1540
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bpp-phyl/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class BppPhyl(Package):
+ """Bio++ phylogeny library."""
+
+ homepage = "http://biopp.univ-montp2.fr/wiki/index.php/Installation"
+ url = "http://biopp.univ-montp2.fr/repos/sources/bpp-phyl-2.2.0.tar.gz"
+
+ version('2.2.0', '5c40667ec0bf37e0ecaba321be932770')
+
+ depends_on('cmake', type='build')
+ depends_on('bpp-core')
+ depends_on('bpp-seq')
+
+ def install(self, spec, prefix):
+ cmake('-DBUILD_TESTING=FALSE', '.', *std_cmake_args)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bpp-seq/package.py b/var/spack/repos/builtin/packages/bpp-seq/package.py
new file mode 100644
index 0000000000..15c99da2b1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bpp-seq/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class BppSeq(Package):
+ """Bio++ seq library."""
+
+ homepage = "http://biopp.univ-montp2.fr/wiki/index.php/Installation"
+ url = "http://biopp.univ-montp2.fr/repos/sources/bpp-seq-2.2.0.tar.gz"
+
+ version('2.2.0', '44adef0ff4d5ca4e69ccf258c9270633')
+
+ depends_on('cmake', type='build')
+ depends_on('bpp-core')
+
+ def install(self, spec, prefix):
+ cmake('-DBUILD_TESTING=FALSE', '.', *std_cmake_args)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/bpp-suite/package.py b/var/spack/repos/builtin/packages/bpp-suite/package.py
new file mode 100644
index 0000000000..d15030622e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bpp-suite/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class BppSuite(Package):
+ """BppSuite is a suite of ready-to-use programs for phylogenetic and
+ sequence analysis."""
+
+ homepage = "http://biopp.univ-montp2.fr/wiki/index.php/BppSuite"
+ url = "http://biopp.univ-montp2.fr/repos/sources/bppsuite/bppsuite-2.2.0.tar.gz"
+
+ version('2.2.0', 'd8b29ad7ccf5bd3a7beb701350c9e2a4')
+
+ depends_on('cmake', type='build')
+ depends_on('texinfo', type='build')
+ depends_on('bpp-core')
+ depends_on('bpp-seq')
+ depends_on('bpp-phyl')
+
+ def install(self, spec, prefix):
+ cmake('.', *std_cmake_args)
+ make()
+ make('install')
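
All four Bio++ packages repeat the same in-source cmake/make/make install recipe with an explicit cmake build dependency. A hedged sketch of the same recipe on the CMakePackage base class (used for boxlib earlier in this change), which supplies the cmake dependency and the cmake/build/install phases itself, is shown for bpp-core; the other three would follow the same shape:

    from spack import *


    class BppCore(CMakePackage):
        """Bio++ core library."""

        homepage = "http://biopp.univ-montp2.fr/wiki/index.php/Installation"
        url = "http://biopp.univ-montp2.fr/repos/sources/bpp-core-2.2.0.tar.gz"

        version('2.2.0', '5789ed2ae8687d13664140cd77203477')

        def cmake_args(self):
            # Keep the tests disabled, as the hand-written install() did.
            return ['-DBUILD_TESTING=FALSE']
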
diff --git a/var/spack/repos/builtin/packages/bwa/package.py b/var/spack/repos/builtin/packages/bwa/package.py
new file mode 100644
index 0000000000..bb6763629b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/bwa/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Bwa(Package):
+ """Burrow-Wheeler Aligner for pairwise alignment between DNA sequences."""
+
+ homepage = "http://github.com/lh3/bwa"
+ url = "https://github.com/lh3/bwa/releases/download/v0.7.15/bwa-0.7.15.tar.bz2"
+
+ version('0.7.15', 'fcf470a46a1dbe2f96a1c5b87c530554')
+
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ filter_file(r'^INCLUDES=',
+ "INCLUDES=-I%s" % spec['zlib'].prefix.include, 'Makefile')
+ filter_file(r'^LIBS=', "LIBS=-L%s " % spec['zlib'].prefix.lib,
+ 'Makefile')
+ make()
+
+ mkdirp(prefix.bin)
+ install('bwa', join_path(prefix.bin, 'bwa'))
+ set_executable(join_path(prefix.bin, 'bwa'))
+ mkdirp(prefix.doc)
+ install('README.md', prefix.doc)
+ install('NEWS.md', prefix.doc)
+ mkdirp(prefix.man1)
+ install('bwa.1', prefix.man1)
diff --git a/var/spack/repos/builtin/packages/bzip2/package.py b/var/spack/repos/builtin/packages/bzip2/package.py
index 3b6d1d830e..9e5894a6a8 100644
--- a/var/spack/repos/builtin/packages/bzip2/package.py
+++ b/var/spack/repos/builtin/packages/bzip2/package.py
@@ -24,54 +24,69 @@
##############################################################################
from spack import *
+
class Bzip2(Package):
"""bzip2 is a freely available, patent free high-quality data
- compressor. It typically compresses files to within 10% to 15%
- of the best available techniques (the PPM family of statistical
- compressors), whilst being around twice as fast at compression
- and six times faster at decompression.
+ compressor. It typically compresses files to within 10% to 15%
+ of the best available techniques (the PPM family of statistical
+ compressors), whilst being around twice as fast at compression
+ and six times faster at decompression."""
- """
homepage = "http://www.bzip.org"
url = "http://www.bzip.org/1.0.6/bzip2-1.0.6.tar.gz"
version('1.0.6', '00b516f4704d4a7cb50a1d97e6e8e15b')
-
def patch(self):
- mf = FileFilter('Makefile-libbz2_so')
- mf.filter(r'^CC=gcc', 'CC=cc')
+ # bzip2 comes with two separate Makefiles for static and dynamic builds
+ # Tell both to use Spack's compiler wrapper instead of GCC
+ filter_file(r'^CC=gcc', 'CC=cc', 'Makefile')
+ filter_file(r'^CC=gcc', 'CC=cc', 'Makefile-libbz2_so')
- # Below stuff patches the link line to use RPATHs on Mac OS X.
+ # Patch the link line to use RPATHs on macOS
if 'darwin' in self.spec.architecture:
v = self.spec.version
- v1, v2, v3 = (v.up_to(i) for i in (1,2,3))
+ v1, v2, v3 = (v.up_to(i) for i in (1, 2, 3))
- mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)'.format(v2, v3),
- '$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)'.format(v1, v2, v3, v3), string=True)
+ kwargs = {'ignore_absent': False, 'backup': False, 'string': True}
- mf.filter('$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3),
- '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib'.format(v3), string=True)
- mf.filter('rm -f libbz2.so.{0}'.format(v2),
- 'rm -f libbz2.{0}.dylib'.format(v2), string=True)
- mf.filter('ln -s libbz2.so.{0} libbz2.so.{1}'.format(v3, v2),
- 'ln -s libbz2.{0}.dylib libbz2.{1}.dylib'.format(v3, v2), string=True)
+ mf = FileFilter('Makefile-libbz2_so')
+ mf.filter('$(CC) -shared -Wl,-soname -Wl,libbz2.so.{0} -o libbz2.so.{1} $(OBJS)' # noqa
+ .format(v2, v3),
+ '$(CC) -dynamiclib -Wl,-install_name -Wl,@rpath/libbz2.{0}.dylib -current_version {1} -compatibility_version {2} -o libbz2.{3}.dylib $(OBJS)' # noqa
+ .format(v1, v2, v3, v3),
+ **kwargs)
+ mf.filter(
+ '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.so.{0}'.format(v3), # noqa
+ '$(CC) $(CFLAGS) -o bzip2-shared bzip2.c libbz2.{0}.dylib'
+ .format(v3), **kwargs)
+ mf.filter(
+ 'rm -f libbz2.so.{0}'.format(v2),
+ 'rm -f libbz2.{0}.dylib'.format(v2), **kwargs)
+ mf.filter(
+ 'ln -s libbz2.so.{0} libbz2.so.{1}'.format(v3, v2),
+ 'ln -s libbz2.{0}.dylib libbz2.{1}.dylib'.format(v3, v2),
+ **kwargs)
def install(self, spec, prefix):
+ # Build the dynamic library first
make('-f', 'Makefile-libbz2_so')
- make('clean')
- make("install", "PREFIX=%s" % prefix)
+ # Build the static library and everything else
+ make()
+ make('install', 'PREFIX={0}'.format(prefix))
install('bzip2-shared', join_path(prefix.bin, 'bzip2'))
- v1, v2, v3 = (self.spec.version.up_to(i) for i in (1,2,3))
+ v1, v2, v3 = (self.spec.version.up_to(i) for i in (1, 2, 3))
if 'darwin' in self.spec.architecture:
lib = 'libbz2.dylib'
- lib1, lib2, lib3 = ('libbz2.{0}.dylib'.format(v) for v in (v1, v2, v3))
+ lib1, lib2, lib3 = ('libbz2.{0}.dylib'.format(v)
+ for v in (v1, v2, v3))
else:
lib = 'libbz2.so'
- lib1, lib2, lib3 = ('libbz2.so.{0}'.format(v) for v in (v1, v2, v3))
+ lib1, lib2, lib3 = ('libbz2.so.{0}'.format(v)
+ for v in (v1, v2, v3))
install(lib3, join_path(prefix.lib, lib3))
with working_dir(prefix.lib):
diff --git a/var/spack/repos/builtin/packages/c-blosc/package.py b/var/spack/repos/builtin/packages/c-blosc/package.py
new file mode 100644
index 0000000000..4ebf3811a5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/c-blosc/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import sys
+
+from spack import *
+
+
+class CBlosc(Package):
+ """Blosc, an extremely fast, multi-threaded, meta-compressor library"""
+ homepage = "http://www.blosc.org"
+ url = "https://github.com/Blosc/c-blosc/archive/v1.9.2.tar.gz"
+
+ version('1.9.2', 'dd2d83069d74b36b8093f1c6b49defc5')
+ version('1.9.1', '7d708d3daadfacf984a87b71b1734ce2')
+ version('1.9.0', 'e4c1dc8e2c468e5cfa2bf05eeee5357a')
+ version('1.8.1', 'd73d5be01359cf271e9386c90dcf5b05')
+ version('1.8.0', '5b92ecb287695ba20cc33d30bf221c4f')
+
+ depends_on("cmake", type='build')
+ depends_on("snappy")
+ depends_on("zlib")
+
+ def install(self, spec, prefix):
+ cmake('.', *std_cmake_args)
+
+ make()
+ make("install")
+ if sys.platform == 'darwin':
+ fix_darwin_install_name(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/cairo/package.py b/var/spack/repos/builtin/packages/cairo/package.py
index 5c7c2fae22..9df93ccddb 100644
--- a/var/spack/repos/builtin/packages/cairo/package.py
+++ b/var/spack/repos/builtin/packages/cairo/package.py
@@ -24,21 +24,30 @@
##############################################################################
from spack import *
-class Cairo(Package):
- """Cairo is a 2D graphics library with support for multiple output devices."""
+
+class Cairo(AutotoolsPackage):
+ """Cairo is a 2D graphics library with support for multiple output
+ devices."""
homepage = "http://cairographics.org"
- url = "http://cairographics.org/releases/cairo-1.14.0.tar.xz"
+ url = "http://cairographics.org/releases/cairo-1.14.8.tar.xz"
+
+ version('1.14.8', 'c6f7b99986f93c9df78653c3e6a3b5043f65145e')
+ version('1.14.0', '53cf589b983412ea7f78feee2e1ba9cea6e3ebae')
- version('1.14.0', 'fc3a5edeba703f906f2241b394f0cced')
+ variant('X', default=False, description="Build with X11 support")
+ depends_on('libx11', when='+X')
+ depends_on('libxext', when='+X')
+ depends_on('libxrender', when='+X')
+ depends_on('libxcb', when='+X')
depends_on("libpng")
depends_on("glib")
depends_on("pixman")
- depends_on("fontconfig@2.10.91:") # Require newer version of fontconfig.
+ depends_on("freetype")
+ depends_on("pkg-config", type="build")
+ depends_on("fontconfig@2.10.91:") # Require newer version of fontconfig.
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix,
- "--disable-trace", # can cause problems with libiberty
- "--enable-tee")
- make()
- make("install")
+ def configure_args(self):
+ args = ["--disable-trace", # can cause problems with libiberty
+ "--enable-tee"]
+ return args
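
With cairo now an AutotoolsPackage, configure_args() is the single hook for configure options; the '+X' variant currently only adds the X11 libraries as dependencies. A hedged sketch of also steering the configure switches from the variant follows; the --enable-xlib/--disable-xlib names are our reading of cairo's configure script and are not taken from this diff:

    def configure_args(self):
        args = ['--disable-trace',   # can cause problems with libiberty
                '--enable-tee']
        if '+X' in self.spec:
            args.append('--enable-xlib')     # assumed flag name, see above
        else:
            args.append('--disable-xlib')    # assumed flag name, see above
        return args
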
diff --git a/var/spack/repos/builtin/packages/caliper/package.py b/var/spack/repos/builtin/packages/caliper/package.py
index 4b8fe0d8af..c9d4a41bc2 100644
--- a/var/spack/repos/builtin/packages/caliper/package.py
+++ b/var/spack/repos/builtin/packages/caliper/package.py
@@ -24,9 +24,9 @@
##############################################################################
from spack import *
+
class Caliper(Package):
- """
- Caliper is a generic context annotation system. It gives programmers the
+ """Caliper is a generic context annotation system. It gives programmers the
ability to provide arbitrary program context information to (performance)
tools at runtime.
"""
@@ -34,16 +34,17 @@ class Caliper(Package):
homepage = "https://github.com/LLNL/Caliper"
url = ""
- version('master', git='ssh://git@github.com:LLNL/Caliper.git')
+ version('master', git='https://github.com/LLNL/Caliper.git')
- variant('mpi', default=False, description='Enable MPI function wrappers.')
+ variant('mpi', default=True, description='Enable MPI function wrappers.')
depends_on('libunwind')
depends_on('papi')
depends_on('mpi', when='+mpi')
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
- with working_dir('build', create=True):
- cmake('..', *std_cmake_args)
- make()
- make("install")
+ with working_dir('build', create=True):
+ cmake('..', *std_cmake_args)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/callpath/package.py b/var/spack/repos/builtin/packages/callpath/package.py
index 2ad2dc60e4..f8227fa49e 100644
--- a/var/spack/repos/builtin/packages/callpath/package.py
+++ b/var/spack/repos/builtin/packages/callpath/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Callpath(Package):
"""Library for representing callpaths consistently in
distributed-memory performance tools."""
@@ -39,6 +40,7 @@ class Callpath(Package):
depends_on("dyninst")
depends_on("adept-utils")
depends_on("mpi")
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
# TODO: offer options for the walker used.
diff --git a/var/spack/repos/builtin/packages/cantera/package.py b/var/spack/repos/builtin/packages/cantera/package.py
new file mode 100644
index 0000000000..5cd9fcdd17
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cantera/package.py
@@ -0,0 +1,202 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Cantera(Package):
+ """Cantera is a suite of object-oriented software tools for problems
+ involving chemical kinetics, thermodynamics, and/or transport processes."""
+
+ homepage = "http://www.cantera.org/docs/sphinx/html/index.html"
+ url = "https://github.com/Cantera/cantera/archive/v2.2.1.tar.gz"
+
+ version('2.2.1', '9d1919bdef39ddec54485fc8a741a3aa')
+
+ variant('lapack', default=True,
+ description='Build with external BLAS/LAPACK libraries')
+ variant('threadsafe', default=True,
+ description='Build threadsafe, requires Boost')
+ variant('sundials', default=True,
+ description='Build with external Sundials')
+ variant('python', default=False,
+ description='Build the Cantera Python module')
+ variant('matlab', default=False,
+ description='Build the Cantera Matlab toolbox')
+
+ # Required dependencies
+ depends_on('scons', type='build')
+
+ # Recommended dependencies
+ depends_on('blas', when='+lapack')
+ depends_on('lapack', when='+lapack')
+ depends_on('boost', when='+threadsafe')
+ depends_on('sundials', when='+sundials') # must be compiled with -fPIC
+
+ # Python module dependencies
+ extends('python', when='+python')
+ depends_on('py-numpy', when='+python', type=('build', 'run'))
+ depends_on('py-scipy', when='+python', type=('build', 'run'))
+ depends_on('py-cython', when='+python', type=('build', 'run'))
+ depends_on('py-3to2', when='+python', type=('build', 'run'))
+ # TODO: these "when" specs don't actually work
+ # depends_on('py-unittest2', when='+python^python@2.6')
+ # depends_on('py-unittest2py3k', when='+python^python@3.1')
+
+ # Matlab toolbox dependencies
+ # TODO: add Matlab package
+ # TODO: allow packages to extend multiple other packages
+ # extends('matlab', when='+matlab')
+
+ def install(self, spec, prefix):
+ # Required options
+ options = [
+ 'prefix={0}'.format(prefix),
+ 'CC={0}'.format(os.environ['CC']),
+ 'CXX={0}'.format(os.environ['CXX']),
+ 'F77={0}'.format(os.environ['F77']),
+ 'FORTRAN={0}'.format(os.environ['FC']),
+ 'cc_flags=-fPIC',
+ # Allow Spack environment variables to propagate through to SCons
+ 'env_vars=all'
+ ]
+
+ # BLAS/LAPACK support
+ if '+lapack' in spec:
+ lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
+ options.extend([
+ 'blas_lapack_libs={0}'.format(','.join(lapack_blas.names)),
+ 'blas_lapack_dir={0}'.format(spec['lapack'].prefix.lib)
+ ])
+
+ # Threadsafe build, requires Boost
+ if '+threadsafe' in spec:
+ options.extend([
+ 'build_thread_safe=yes',
+ 'boost_inc_dir={0}'.format(spec['boost'].prefix.include),
+ 'boost_lib_dir={0}'.format(spec['boost'].prefix.lib)
+ ])
+ else:
+ options.append('build_thread_safe=no')
+
+ # Sundials support
+ if '+sundials' in spec:
+ options.extend([
+ 'use_sundials=y',
+ 'sundials_include={0}'.format(spec['sundials'].prefix.include),
+ 'sundials_libdir={0}'.format(spec['sundials'].prefix.lib),
+ 'sundials_license={0}'.format(
+ join_path(spec['sundials'].prefix, 'LICENSE'))
+ ])
+ else:
+ options.append('use_sundials=n')
+
+ # Python module
+ if '+python' in spec:
+ options.extend([
+ 'python_package=full',
+ 'python_cmd={0}'.format(
+ join_path(spec['python'].prefix.bin, 'python')),
+ 'python_array_home={0}'.format(spec['py-numpy'].prefix)
+ ])
+ if spec['python'].satisfies('@3'):
+ options.extend([
+ 'python3_package=y',
+ 'python3_cmd={0}'.format(
+ join_path(spec['python'].prefix.bin, 'python')),
+ 'python3_array_home={0}'.format(spec['py-numpy'].prefix)
+ ])
+ else:
+ options.append('python3_package=n')
+ else:
+ options.append('python_package=none')
+ options.append('python3_package=n')
+
+ # Matlab toolbox
+ if '+matlab' in spec:
+ options.extend([
+ 'matlab_toolbox=y',
+ 'matlab_path={0}'.format(spec['matlab'].prefix)
+ ])
+ else:
+ options.append('matlab_toolbox=n')
+
+ scons('build', *options)
+
+ if '+python' in spec:
+ # Tests will always fail if Python dependencies aren't built
+ # In addition, 3 of the tests fail when run in parallel
+ scons('test', parallel=False)
+
+ scons('install')
+
+ self.filter_compilers()
+
+ def filter_compilers(self):
+ """Run after install to tell the Makefile and SConstruct files to use
+ the compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC, CXX, F77, and FC set to Spack's
+ generic cc, c++, f77, and f90. We want them to be bound to whatever
+ compiler they were built with."""
+
+ kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
+ dirname = os.path.join(self.prefix, 'share/cantera/samples')
+
+ cc_files = [
+ 'cxx/rankine/Makefile', 'cxx/NASA_coeffs/Makefile',
+ 'cxx/kinetics1/Makefile', 'cxx/flamespeed/Makefile',
+ 'cxx/combustor/Makefile', 'f77/SConstruct'
+ ]
+
+ cxx_files = [
+ 'cxx/rankine/Makefile', 'cxx/NASA_coeffs/Makefile',
+ 'cxx/kinetics1/Makefile', 'cxx/flamespeed/Makefile',
+ 'cxx/combustor/Makefile'
+ ]
+
+ f77_files = [
+ 'f77/Makefile', 'f77/SConstruct'
+ ]
+
+ fc_files = [
+ 'f90/Makefile', 'f90/SConstruct'
+ ]
+
+ for filename in cc_files:
+ filter_file(os.environ['CC'], self.compiler.cc,
+ os.path.join(dirname, filename), **kwargs)
+
+ for filename in cxx_files:
+ filter_file(os.environ['CXX'], self.compiler.cxx,
+ os.path.join(dirname, filename), **kwargs)
+
+ for filename in f77_files:
+ filter_file(os.environ['F77'], self.compiler.f77,
+ os.path.join(dirname, filename), **kwargs)
+
+ for filename in fc_files:
+ filter_file(os.environ['FC'], self.compiler.fc,
+ os.path.join(dirname, filename), **kwargs)
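
The Cantera recipe above shows two recurring Spack idioms: install() translates each variant into SCons options, and filter_compilers() afterwards rewrites the installed sample Makefiles with filter_file(). The sketch below condenses the variant-to-option half of that pattern for a hypothetical SCons-based package; the class name, URL, checksum and option names are illustrative only, not part of any real recipe.

from spack import *


class Foo(Package):
    """Hypothetical SCons-based package illustrating the
    variant-to-option pattern."""

    homepage = "https://example.com/foo"
    url      = "https://example.com/foo-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder md5

    variant('lapack', default=True,
            description='Link against external BLAS/LAPACK')

    depends_on('scons', type='build')
    depends_on('blas', when='+lapack')
    depends_on('lapack', when='+lapack')

    def install(self, spec, prefix):
        options = ['prefix={0}'.format(prefix)]
        # Each variant contributes its own group of SCons options.
        if '+lapack' in spec:
            lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
            options.extend([
                'blas_lapack_libs={0}'.format(','.join(lapack_blas.names)),
                'blas_lapack_dir={0}'.format(spec['lapack'].prefix.lib)
            ])
        scons('build', *options)
        scons('install')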
diff --git a/var/spack/repos/builtin/packages/cask/package.py b/var/spack/repos/builtin/packages/cask/package.py
new file mode 100644
index 0000000000..b48365b61d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cask/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# Based on Homebrew's formula:
+# https://github.com/Homebrew/homebrew-core/blob/master/Formula/cask.rb
+#
+from spack import *
+from glob import glob
+
+
+class Cask(Package):
+ """Cask is a project management tool for Emacs Lisp to automate the package
+ development cycle; development, dependencies, testing, building,
+ packaging and more."""
+ homepage = "http://cask.readthedocs.io/en/latest/"
+ url = "https://github.com/cask/cask/archive/v0.7.4.tar.gz"
+
+ version('0.8.1', '25196468a7ce634cfff14733678be6ba')
+ # version 0.8.0 is broken
+ version('0.7.4', 'c973a7db43bc980dd83759a5864a1260')
+
+ depends_on('emacs', type=('build', 'run'))
+
+ def install(self, spec, prefix):
+ mkdirp(prefix.bin)
+ install('bin/cask', prefix.bin)
+ install_tree('templates', join_path(prefix, 'templates'))
+ for el_file in glob("*.el"):
+ install(el_file, prefix)
+ for misc_file in ['COPYING', 'cask.png', 'README.md']:
+ install(misc_file, prefix)
+ # disable cask's automatic upgrading feature
+ touch(join_path(prefix, ".no-upgrade"))
diff --git a/var/spack/repos/builtin/packages/catch/package.py b/var/spack/repos/builtin/packages/catch/package.py
new file mode 100644
index 0000000000..8d2b0a1b24
--- /dev/null
+++ b/var/spack/repos/builtin/packages/catch/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Catch(Package):
+ """Catch tests"""
+
+ homepage = "https://github.com/philsquared/Catch"
+ url = "https://github.com/philsquared/Catch/archive/v1.3.0.tar.gz"
+
+ version('1.3.0', 'e13694aaff72817d02af8ed27d077cd261b6e857')
+
+ def install(self, spec, prefix):
+ mkdirp(prefix.include)
+ install(join_path('single_include', 'catch.hpp'), prefix.include)
+ # fakes out spack so it installs a module file
+ mkdirp(join_path(prefix, 'bin'))
diff --git a/var/spack/repos/builtin/packages/cblas/package.py b/var/spack/repos/builtin/packages/cblas/package.py
index 0b85c5842a..0828141307 100644
--- a/var/spack/repos/builtin/packages/cblas/package.py
+++ b/var/spack/repos/builtin/packages/cblas/package.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+
class Cblas(Package):
"""The BLAS (Basic Linear Algebra Subprograms) are routines that
@@ -42,11 +42,11 @@ class Cblas(Package):
def patch(self):
mf = FileFilter('Makefile.in')
- mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' % self.spec['blas'].prefix.lib)
+ mf.filter('^BLLIB =.*', 'BLLIB = %s/libblas.a' %
+ self.spec['blas'].prefix.lib)
mf.filter('^CC =.*', 'CC = cc')
mf.filter('^FC =.*', 'FC = f90')
-
def install(self, spec, prefix):
make('all')
mkdirp(prefix.lib)
@@ -54,6 +54,5 @@ class Cblas(Package):
# Rename the generated lib file to libcblas.a
install('./lib/cblas_LINUX.a', '%s/libcblas.a' % prefix.lib)
- install('./include/cblas.h','%s' % prefix.include)
- install('./include/cblas_f77.h','%s' % prefix.include)
-
+ install('./include/cblas.h', '%s' % prefix.include)
+ install('./include/cblas_f77.h', '%s' % prefix.include)
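
The cblas change keeps the recipe's patch() hook, which edits Makefile.in with FileFilter before the build runs. A minimal sketch of the same hook, assuming a hypothetical package whose stock Makefile hard-codes a BLAS path in a BLASLIB variable, could look like this:

from spack import *


class Foo(Package):
    """Hypothetical package whose bundled Makefile hard-codes paths."""

    homepage = "https://example.com/foo"
    url      = "https://example.com/foo-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder md5

    depends_on('blas')

    def patch(self):
        # Rewrite the hard-coded settings in the stock Makefile so they
        # point at Spack's BLAS installation and compiler wrappers
        # (variable names here are hypothetical).
        mf = FileFilter('Makefile.in')
        mf.filter('^BLASLIB =.*',
                  'BLASLIB = %s/libblas.a' % self.spec['blas'].prefix.lib)
        mf.filter('^CC =.*', 'CC = cc')

    def install(self, spec, prefix):
        make('all')
        mkdirp(prefix.lib)
        install('lib/libfoo.a', prefix.lib)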
diff --git a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
index 7ce90ce0db..3d8572232c 100644
--- a/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-argonavis/package.py
@@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-################################################################################
+##########################################################################
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
@@ -38,39 +38,44 @@
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
-################################################################################
+##########################################################################
from spack import *
+
class CbtfArgonavis(Package):
- """CBTF Argo Navis project contains the CUDA collector and supporting
- libraries that was done as a result of a DOE SBIR grant."""
+ """CBTF Argo Navis project contains the CUDA collector and supporting
+ libraries that were developed as a result of a DOE SBIR grant.
+
+ """
homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
# Mirror access template example
- #url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/cbtf-argonavis-1.6.tar.gz"
- #version('1.6', '0fafa0008478405c2c2319450f174ed4')
+ # url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/cbtf-argonavis-1.6.tar.gz"
+ # version('1.6', '0fafa0008478405c2c2319450f174ed4')
- version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-argonavis.git')
+ version('1.8', branch='master',
+ git='https://github.com/OpenSpeedShop/cbtf-argonavis.git')
- depends_on("cmake@3.0.2")
- depends_on("boost@1.50.0:")
+ depends_on("cmake@3.0.2:", type='build')
+ depends_on("boost@1.50.0:1.59.0")
depends_on("papi")
- depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
+ depends_on("mrnet@5.0.1:+lwthreads")
depends_on("cbtf")
depends_on("cbtf-krell")
- depends_on("cuda@6.0.37")
- #depends_on("cuda")
+ depends_on("cuda")
parallel = False
def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
- # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings
+ # Set into cmakeOptions the build type options that will
+ # enable the cbtf-krell build type settings
- compile_flags="-O2 -g"
+ compile_flags = "-O2 -g"
BuildTypeOptions = []
- # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+ # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the
+ # stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
@@ -81,50 +86,54 @@ class CbtfArgonavis(Package):
if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
- '-DCMAKE_VERBOSE_MAKEFILE=ON',
- '-DCMAKE_BUILD_TYPE=None',
- '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
- '-DCMAKE_C_FLAGS=%s' % compile_flags
+ '-DCMAKE_VERBOSE_MAKEFILE=ON',
+ '-DCMAKE_BUILD_TYPE=None',
+ '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
+ '-DCMAKE_C_FLAGS=%s' % compile_flags
])
cmakeOptions.extend(BuildTypeOptions)
-
def install(self, spec, prefix):
- # Look for package installation information in the cbtf and cbtf-krell prefixes
- cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
-
- with working_dir('CUDA'):
- with working_dir('build', create=True):
-
- cmakeOptions = []
- cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
- '-DCUDA_DIR=%s' % spec['cuda'].prefix,
- '-DCUDA_INSTALL_PATH=%s' % spec['cuda'].prefix,
- '-DCUDA_TOOLKIT_ROOT_DIR=%s' % spec['cuda'].prefix,
- '-DCUPTI_DIR=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'),
- '-DCUPTI_ROOT=%s' % join_path(spec['cuda'].prefix + '/extras/CUPTI'),
- '-DPAPI_ROOT=%s' % spec['papi'].prefix,
- '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
- '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
- '-DBOOST_ROOT=%s' % spec['boost'].prefix,
- '-DBoost_DIR=%s' % spec['boost'].prefix,
- '-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib,
- '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- '-DBoost_NO_SYSTEM_PATHS=ON'
- ])
-
- # Add in the standard cmake arguments
- cmakeOptions.extend(std_cmake_args)
-
- # Adjust the standard cmake arguments to what we want the build type, etc to be
- self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
-
- # Invoke cmake
- cmake('..', *cmakeOptions)
-
- make("clean")
- make()
- make("install")
+ # Look for package installation information in the cbtf and cbtf-krell
+ # prefixes
+ cmake_prefix_path = join_path(
+ spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
+
+ with working_dir('CUDA'):
+ with working_dir('build', create=True):
+
+ cmakeOptions = []
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ '-DCUDA_DIR=%s' % spec['cuda'].prefix,
+ '-DCUDA_INSTALL_PATH=%s' % spec['cuda'].prefix,
+ '-DCUDA_TOOLKIT_ROOT_DIR=%s' % spec['cuda'].prefix,
+ '-DCUPTI_DIR=%s' % join_path(
+ spec['cuda'].prefix + '/extras/CUPTI'),
+ '-DCUPTI_ROOT=%s' % join_path(
+ spec['cuda'].prefix + '/extras/CUPTI'),
+ '-DPAPI_ROOT=%s' % spec['papi'].prefix,
+ '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+ '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DBoost_DIR=%s' % spec['boost'].prefix,
+ '-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DBoost_NO_SYSTEM_PATHS=ON'])
+
+ # Add in the standard cmake arguments
+ cmakeOptions.extend(std_cmake_args)
+
+ # Adjust the standard cmake arguments to what we want the build
+ # type, etc to be
+ self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+ # Invoke cmake
+ cmake('..', *cmakeOptions)
+
+ make("clean")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/cbtf-krell/package.py b/var/spack/repos/builtin/packages/cbtf-krell/package.py
index 7506f78146..3f36942e9a 100644
--- a/var/spack/repos/builtin/packages/cbtf-krell/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-krell/package.py
@@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-################################################################################
+##########################################################################
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
@@ -38,41 +38,51 @@
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
-################################################################################
+##########################################################################
from spack import *
+
class CbtfKrell(Package):
- """CBTF Krell project contains the Krell Institute contributions to the CBTF project.
- These contributions include many performance data collectors and support
- libraries as well as some example tools that drive the data collection at
- HPC levels of scale."""
+ """CBTF Krell project contains the Krell Institute contributions to the
+ CBTF project. These contributions include many performance data
+ collectors and support libraries as well as some example tools
+ that drive the data collection at HPC levels of scale.
+
+ """
homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
# optional mirror access template
- #url = "file:/home/jeg/cbtf-krell-1.6.tar.gz"
- #version('1.6', 'edeb61cd488f16e7b124f77db9ce762d')
+ # url = "file:/home/jeg/cbtf-krell-1.6.tar.gz"
+ # version('1.6', 'edeb61cd488f16e7b124f77db9ce762d')
- version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf-krell.git')
+ version('1.8', branch='master',
+ git='https://github.com/OpenSpeedShop/cbtf-krell.git')
# MPI variants
- variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.")
- variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.")
- variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.")
- variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.")
- variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.")
- variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.")
+ variant('openmpi', default=False,
+ description="Build mpi experiment collector for openmpi MPI..")
+ variant('mpt', default=False,
+ description="Build mpi experiment collector for SGI MPT MPI.")
+ variant('mvapich2', default=False,
+ description="Build mpi experiment collector for mvapich2 MPI.")
+ variant('mvapich', default=False,
+ description="Build mpi experiment collector for mvapich MPI.")
+ variant('mpich2', default=False,
+ description="Build mpi experiment collector for mpich2 MPI.")
+ variant('mpich', default=False,
+ description="Build mpi experiment collector for mpich MPI.")
# Dependencies for cbtf-krell
- depends_on("cmake@3.0.2")
+ depends_on("cmake@3.0.2:", type='build')
# For binutils service
depends_on("binutils@2.24+krellpatch")
# collectionTool
- depends_on("boost@1.50.0:")
- depends_on("dyninst@8.2.1:")
- depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
+ depends_on("boost@1.50.0:1.59.0")
+ depends_on("dyninst@9.2.0")
+ depends_on("mrnet@5.0.1:+lwthreads")
depends_on("xerces-c@3.1.1:")
depends_on("cbtf")
@@ -83,7 +93,8 @@ class CbtfKrell(Package):
depends_on("papi")
# MPI Installations
- # These have not worked either for build or execution, commenting out for now
+ # These have not worked either for build or execution, commenting out for
+ # now
depends_on("openmpi", when='+openmpi')
depends_on("mpich", when='+mpich')
depends_on("mpich2", when='+mpich2')
@@ -94,11 +105,13 @@ class CbtfKrell(Package):
parallel = False
def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
- # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings
-
- compile_flags="-O2 -g"
+ # Set into cmakeOptions the build type options that will
+ # enable the cbtf-krell build type settings
+
+ compile_flags = "-O2 -g"
BuildTypeOptions = []
- # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+ # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the
+ # stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
@@ -109,75 +122,64 @@ class CbtfKrell(Package):
if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
- '-DCMAKE_VERBOSE_MAKEFILE=ON',
- '-DCMAKE_BUILD_TYPE=None',
- '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
- '-DCMAKE_C_FLAGS=%s' % compile_flags
+ '-DCMAKE_VERBOSE_MAKEFILE=ON',
+ '-DCMAKE_BUILD_TYPE=None',
+ '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
+ '-DCMAKE_C_FLAGS=%s' % compile_flags
])
cmakeOptions.extend(BuildTypeOptions)
-
-
def set_mpi_cmakeOptions(self, spec, cmakeOptions):
- # Appends to cmakeOptions the options that will enable the appropriate MPI implementations
-
+ # Appends to cmakeOptions the options that will enable the appropriate
+ # MPI implementations
+
MPIOptions = []
# openmpi
if '+openmpi' in spec:
- MPIOptions.extend([
- '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix
- ])
+ MPIOptions.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
# mpich
if '+mpich' in spec:
- MPIOptions.extend([
- '-DMPICH_DIR=%s' % spec['mpich'].prefix
- ])
+ MPIOptions.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
# mpich2
if '+mpich2' in spec:
- MPIOptions.extend([
- '-DMPICH2_DIR=%s' % spec['mpich2'].prefix
- ])
+ MPIOptions.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
# mvapich
if '+mvapich' in spec:
- MPIOptions.extend([
- '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix
- ])
+ MPIOptions.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
# mvapich2
if '+mvapich2' in spec:
- MPIOptions.extend([
- '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix
- ])
+ MPIOptions.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
# mpt
if '+mpt' in spec:
- MPIOptions.extend([
- '-DMPT_DIR=%s' % spec['mpt'].prefix
- ])
+ MPIOptions.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
cmakeOptions.extend(MPIOptions)
def install(self, spec, prefix):
- # Add in paths for finding package config files that tell us where to find these packages
- #cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['dyninst'].prefix)
- #'-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path
+ # Add in paths for finding package config files that tell us
+ # where to find these packages
+ # cmake_prefix_path = \
+ # join_path(spec['cbtf'].prefix) + ':' + \
+ # join_path(spec['dyninst'].prefix)
+ # '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path
- # Build cbtf-krell with cmake
+ # Build cbtf-krell with cmake
with working_dir('build_cbtf_krell', create=True):
cmakeOptions = []
- cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
- '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
- '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
- '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
- '-DPAPI_DIR=%s' % spec['papi'].prefix,
- '-DBOOST_DIR=%s' % spec['boost'].prefix,
- '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
- '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix
- ])
-
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+ '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
+ '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
+ '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
+ '-DPAPI_DIR=%s' % spec['papi'].prefix,
+ '-DBOOST_DIR=%s' % spec['boost'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
+ '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix])
# Add any MPI implementations coming from variant settings
self.set_mpi_cmakeOptions(spec, cmakeOptions)
@@ -185,9 +187,10 @@ class CbtfKrell(Package):
# Add in the standard cmake arguments
cmakeOptions.extend(std_cmake_args)
- # Adjust the standard cmake arguments to what we want the build type, etc to be
+ # Adjust the standard cmake arguments to what we want the build
+ # type, etc to be
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
-
+
# Invoke cmake
cmake('..', *cmakeOptions)
@@ -195,56 +198,54 @@ class CbtfKrell(Package):
make()
make("install")
-
-
- #if '+cray' in spec:
- #if 'cray' in self.spec.architecture:
+ # if '+cray' in spec:
+ # if 'cray' in self.spec.architecture:
# if '+runtime' in spec:
# with working_dir('build_cbtf_cray_runtime', create=True):
# python_vers='%d.%d' % spec['python'].version[:2]
# cmake .. \
- # -DCMAKE_BUILD_TYPE=Debug \
- # -DTARGET_OS="cray" \
- # -DRUNTIME_ONLY="true" \
- # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
- # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
- # -DCBTF_DIR=${CBTF_ROOT} \
- # -DBOOST_ROOT=${BOOST_INSTALL_PREFIX} \
- # -DXERCESC_DIR=${XERCESC_INSTALL_PREFIX} \
- # -DBINUTILS_DIR=${KRELL_ROOT} \
- # -DLIBMONITOR_DIR=${KRELL_ROOT_COMPUTE} \
- # -DLIBUNWIND_DIR=${KRELL_ROOT_COMPUTE} \
- # -DPAPI_DIR=${PAPI_ROOT} \
- # -DDYNINST_DIR=${DYNINST_CN_ROOT} \
- # -DMRNET_DIR=${MRNET_INSTALL_PREFIX} \
- # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
+ # -DCMAKE_BUILD_TYPE=Debug \
+ # -DTARGET_OS="cray" \
+ # -DRUNTIME_ONLY="true" \
+ # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
+ # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
+ # -DCBTF_DIR=${CBTF_ROOT} \
+ # -DBOOST_ROOT=${BOOST_INSTALL_PREFIX} \
+ # -DXERCESC_DIR=${XERCESC_INSTALL_PREFIX} \
+ # -DBINUTILS_DIR=${KRELL_ROOT} \
+ # -DLIBMONITOR_DIR=${KRELL_ROOT_COMPUTE} \
+ # -DLIBUNWIND_DIR=${KRELL_ROOT_COMPUTE} \
+ # -DPAPI_DIR=${PAPI_ROOT} \
+ # -DDYNINST_DIR=${DYNINST_CN_ROOT} \
+ # -DMRNET_DIR=${MRNET_INSTALL_PREFIX} \
+ # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
# else:
# with working_dir('build_cbtf_cray_frontend', create=True):
# python_vers='%d.%d' % spec['python'].version[:2]
# cmake .. \
- # -DCMAKE_BUILD_TYPE=Debug \
- # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
- # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
- # -DCBTF_DIR=${CBTF_ROOT} \
- # -DRUNTIME_TARGET_OS="cray" \
- # -DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_RUNTIME_ROOT} \
- # -DCBTF_CN_RUNTIME_DIR=${CBTF_CN_RUNTIME_ROOT} \
- # -DLIBMONITOR_CN_RUNTIME_DIR=${LIBMONITOR_CN_ROOT} \
- # -DLIBUNWIND_CN_RUNTIME_DIR=${LIBUNWIND_CN_ROOT} \
- # -DPAPI_CN_RUNTIME_DIR=${PAPI_CN_ROOT} \
- # -DXERCESC_CN_RUNTIME_DIR=/${XERCESC_CN_ROOT} \
- # -DMRNET_CN_RUNTIME_DIR=${MRNET_CN_ROOT} \
- # -DBOOST_CN_RUNTIME_DIR=${BOOST_CN_ROOT} \
- # -DDYNINST_CN_RUNTIME_DIR=${DYNINST_CN_ROOT} \
- # -DBOOST_ROOT=/${KRELL_ROOT} \
- # -DXERCESC_DIR=/${KRELL_ROOT} \
- # -DBINUTILS_DIR=/${KRELL_ROOT} \
- # -DLIBMONITOR_DIR=${KRELL_ROOT} \
- # -DLIBUNWIND_DIR=${KRELL_ROOT} \
- # -DPAPI_DIR=${PAPI_ROOT} \
- # -DDYNINST_DIR=${KRELL_ROOT} \
- # -DMRNET_DIR=${KRELL_ROOT} \
- # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
+ # -DCMAKE_BUILD_TYPE=Debug \
+ # -DCMAKE_INSTALL_PREFIX=${CBTF_KRELL_PREFIX} \
+ # -DCMAKE_PREFIX_PATH=${CBTF_ROOT} \
+ # -DCBTF_DIR=${CBTF_ROOT} \
+ # -DRUNTIME_TARGET_OS="cray" \
+ # -DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_RUNTIME_ROOT} \
+ # -DCBTF_CN_RUNTIME_DIR=${CBTF_CN_RUNTIME_ROOT} \
+ # -DLIBMONITOR_CN_RUNTIME_DIR=${LIBMONITOR_CN_ROOT} \
+ # -DLIBUNWIND_CN_RUNTIME_DIR=${LIBUNWIND_CN_ROOT} \
+ # -DPAPI_CN_RUNTIME_DIR=${PAPI_CN_ROOT} \
+ # -DXERCESC_CN_RUNTIME_DIR=/${XERCESC_CN_ROOT} \
+ # -DMRNET_CN_RUNTIME_DIR=${MRNET_CN_ROOT} \
+ # -DBOOST_CN_RUNTIME_DIR=${BOOST_CN_ROOT} \
+ # -DDYNINST_CN_RUNTIME_DIR=${DYNINST_CN_ROOT} \
+ # -DBOOST_ROOT=/${KRELL_ROOT} \
+ # -DXERCESC_DIR=/${KRELL_ROOT} \
+ # -DBINUTILS_DIR=/${KRELL_ROOT} \
+ # -DLIBMONITOR_DIR=${KRELL_ROOT} \
+ # -DLIBUNWIND_DIR=${KRELL_ROOT} \
+ # -DPAPI_DIR=${PAPI_ROOT} \
+ # -DDYNINST_DIR=${KRELL_ROOT} \
+ # -DMRNET_DIR=${KRELL_ROOT} \
+ # -DMPICH2_DIR=/opt/cray/mpt/7.0.1/gni/mpich2-gnu/48
# fi
#
# make("clean")
@@ -264,22 +265,22 @@ class CbtfKrell(Package):
# fi
#
# else:
-# # Build cbtf-krell with cmake
+# # Build cbtf-krell with cmake
# with working_dir('build_cbtf_krell', create=True):
# cmake('..',
# '-DCMAKE_BUILD_TYPE=Debug',
-# '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
-# '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
-# '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
-# '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
-# '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
-# '-DPAPI_DIR=%s' % spec['papi'].prefix,
-# '-DBOOST_DIR=%s' % spec['boost'].prefix,
-# '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
-# '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
-# '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
-# '-DOPENMPI_DIR=%s' % openmpi_prefix_path,
-# '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+# '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+# '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+# '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
+# '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
+# '-DLIBUNWIND_DIR=%s'% spec['libunwind'].prefix,
+# '-DPAPI_DIR=%s' % spec['papi'].prefix,
+# '-DBOOST_DIR=%s' % spec['boost'].prefix,
+# '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+# '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
+# '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
+# '-DOPENMPI_DIR=%s' % openmpi_prefix_path,
+# '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
# *std_cmake_args)
#
# make("clean")
diff --git a/var/spack/repos/builtin/packages/cbtf-lanl/package.py b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
index fa7de3d4a3..1545c7bf8b 100644
--- a/var/spack/repos/builtin/packages/cbtf-lanl/package.py
+++ b/var/spack/repos/builtin/packages/cbtf-lanl/package.py
@@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-################################################################################
+##########################################################################
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
@@ -38,24 +38,26 @@
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
-################################################################################
+##########################################################################
from spack import *
+
class CbtfLanl(Package):
- """CBTF LANL project contains a memory tool and data center type system command monitoring tool."""
+ """CBTF LANL project contains a memory tool and data center type system
+ command monitoring tool."""
homepage = "http://sourceforge.net/p/cbtf/wiki/Home/"
-
# Mirror access template example
- #url = "file:/g/g24/jeg/cbtf-lanl-1.5.tar.gz"
- #version('1.5', 'c3f78f967b0a42c6734ce4be0e602426')
+ # url = "file:/g/g24/jeg/cbtf-lanl-1.6.tar.gz"
+ # version('1.6', 'c3f78f967b0a42c6734ce4be0e602426')
- version('1.6', branch='master', git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl')
+ version('1.8', branch='master',
+ git='http://git.code.sf.net/p/cbtf-lanl/cbtf-lanl')
- depends_on("cmake@3.0.2")
+ depends_on("cmake@3.0.2:", type='build')
# Dependencies for cbtf-krell
- depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
+ depends_on("mrnet@5.0.1:+lwthreads")
depends_on("xerces-c@3.1.1:")
depends_on("cbtf")
depends_on("cbtf-krell")
@@ -63,11 +65,13 @@ class CbtfLanl(Package):
parallel = False
def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
- # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings
+ # Set into cmakeOptions the build type options that will
+ # enable the cbtf-krell build type settings
- compile_flags="-O2 -g"
+ compile_flags = "-O2 -g"
BuildTypeOptions = []
- # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+ # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the
+ # stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
@@ -78,40 +82,43 @@ class CbtfLanl(Package):
if word.startswith('-DCMAKE_VERBOSE_MAKEFILE'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
- '-DCMAKE_VERBOSE_MAKEFILE=ON',
- '-DCMAKE_BUILD_TYPE=None',
- '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
- '-DCMAKE_C_FLAGS=%s' % compile_flags
+ '-DCMAKE_VERBOSE_MAKEFILE=ON',
+ '-DCMAKE_BUILD_TYPE=None',
+ '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
+ '-DCMAKE_C_FLAGS=%s' % compile_flags
])
cmakeOptions.extend(BuildTypeOptions)
def install(self, spec, prefix):
- # Add in paths for finding package config files that tell us where to find these packages
- cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
-
- with working_dir('build', create=True):
- cmakeOptions = []
- cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
- '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
- '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
- '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
- '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake')
- ])
-
- # Add in the standard cmake arguments
- cmakeOptions.extend(std_cmake_args)
-
- # Adjust the standard cmake arguments to what we want the build type, etc to be
- self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
-
- # Invoke cmake
- cmake('..', *cmakeOptions)
-
- make("clean")
- make()
- make("install")
-
+ # Add in paths for finding package config files that tell us where to
+ # find these packages
+ cmake_prefix_path = join_path(
+ spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix)
+
+ with working_dir('build', create=True):
+ cmakeOptions = []
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
+ '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
+ '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
+ '-DCMAKE_MODULE_PATH=%s' % join_path(
+ prefix.share, 'KrellInstitute', 'cmake')])
+
+ # Add in the standard cmake arguments
+ cmakeOptions.extend(std_cmake_args)
+
+ # Adjust the standard cmake arguments to what we want the build
+ # type, etc to be
+ self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+ # Invoke cmake
+ cmake('..', *cmakeOptions)
+
+ make("clean")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/cbtf/package.py b/var/spack/repos/builtin/packages/cbtf/package.py
index 2231cf1d45..7c9626c90e 100644
--- a/var/spack/repos/builtin/packages/cbtf/package.py
+++ b/var/spack/repos/builtin/packages/cbtf/package.py
@@ -22,7 +22,7 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-################################################################################
+##########################################################################
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
# This program is free software; you can redistribute it and/or modify it under
@@ -38,28 +38,34 @@
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
-################################################################################
+##########################################################################
from spack import *
+
class Cbtf(Package):
- """CBTF project contains the base code for CBTF that supports creating components,
- component networks and the support to connect these components and component
- networks into sequential and distributed network tools."""
+ """CBTF project contains the base code for CBTF that supports creating
+ components, component networks and the support to connect these
+ components and component networks into sequential and distributed
+ network tools.
+
+ """
homepage = "http://sourceforge.net/p/cbtf/wiki/Home"
# Mirror access template example
- #url = "file:/home/jeg/cbtf-1.6.tar.gz"
- #version('1.6', 'c1ef4e5aa4e470dffb042abdba0b9987')
+ # url = "file:/home/jeg/cbtf-1.6.tar.gz"
+ # version('1.6', 'c1ef4e5aa4e470dffb042abdba0b9987')
# Use when the git repository is available
- version('1.6', branch='master', git='https://github.com/OpenSpeedShop/cbtf.git')
+ version('1.8', branch='master',
+ git='https://github.com/OpenSpeedShop/cbtf.git')
- variant('runtime', default=False, description="build only the runtime libraries and collectors.")
+ variant('runtime', default=False,
+ description="build only the runtime libraries and collectors.")
- depends_on("cmake@3.0.2")
- depends_on("boost@1.50.0:")
- depends_on("mrnet@5.0.1:+lwthreads+krellpatch")
+ depends_on("cmake@3.0.2:", type='build')
+ depends_on("boost@1.50.0:1.59.0")
+ depends_on("mrnet@5.0.1:+lwthreads")
depends_on("xerces-c@3.1.1:")
# Work around for spack libxml2 package bug, take off python when fixed
depends_on("libxml2+python")
@@ -67,11 +73,13 @@ class Cbtf(Package):
parallel = False
def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
- # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings
-
- compile_flags="-O2 -g"
+ # Set into cmakeOptions the build type options that will
+ # enable the cbtf-krell build type settings
+
+ compile_flags = "-O2 -g"
BuildTypeOptions = []
- # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+ # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the
+ # stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
@@ -80,61 +88,66 @@ class Cbtf(Package):
if word.startswith('-DCMAKE_C_FLAGS'):
cmakeOptions.remove(word)
BuildTypeOptions.extend([
- '-DCMAKE_BUILD_TYPE=None',
- '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
- '-DCMAKE_C_FLAGS=%s' % compile_flags
+ '-DCMAKE_BUILD_TYPE=None',
+ '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
+ '-DCMAKE_C_FLAGS=%s' % compile_flags
])
cmakeOptions.extend(BuildTypeOptions)
def install(self, spec, prefix):
- with working_dir('build', create=True):
-
- # Boost_NO_SYSTEM_PATHS Set to TRUE to suppress searching
- # in system paths (or other locations outside of BOOST_ROOT
- # or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT.
- # Defaults to OFF.
-
- if '+runtime' in spec:
- # Install message tag include file for use in Intel MIC cbtf-krell build
- # FIXME
- cmakeOptions = []
- cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DBoost_NO_SYSTEM_PATHS=TRUE',
- '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
- '-DBOOST_ROOT=%s' % spec['boost'].prefix,
- '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake')
- ])
-
- # Add in the standard cmake arguments
- cmakeOptions.extend(std_cmake_args)
-
- # Adjust the standard cmake arguments to what we want the build type, etc to be
- self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
-
- # Invoke cmake
- cmake('..', *cmakeOptions)
-
- else:
- cmakeOptions = []
- cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DBoost_NO_SYSTEM_PATHS=TRUE',
- '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
- '-DBOOST_ROOT=%s' % spec['boost'].prefix,
- '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- '-DCMAKE_MODULE_PATH=%s' % join_path(prefix.share,'KrellInstitute','cmake')
- ])
-
- # Add in the standard cmake arguments
- cmakeOptions.extend(std_cmake_args)
-
- # Adjust the standard cmake arguments to what we want the build type, etc to be
- self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
-
- # Invoke cmake
- cmake('..', *cmakeOptions)
-
- make("clean")
- make()
- make("install")
+ with working_dir('build', create=True):
+
+ # Boost_NO_SYSTEM_PATHS Set to TRUE to suppress searching
+ # in system paths (or other locations outside of BOOST_ROOT
+ # or BOOST_INCLUDEDIR). Useful when specifying BOOST_ROOT.
+ # Defaults to OFF.
+
+ if '+runtime' in spec:
+ # Install message tag include file for use in Intel MIC
+ # cbtf-krell build
+ # FIXME
+ cmakeOptions = []
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DBoost_NO_SYSTEM_PATHS=TRUE',
+ '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DCMAKE_MODULE_PATH=%s' % join_path(
+ prefix.share, 'KrellInstitute', 'cmake')])
+
+ # Add in the standard cmake arguments
+ cmakeOptions.extend(std_cmake_args)
+
+ # Adjust the standard cmake arguments to what we want the build
+ # type, etc to be
+ self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+ # Invoke cmake
+ cmake('..', *cmakeOptions)
+
+ else:
+ cmakeOptions = []
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DBoost_NO_SYSTEM_PATHS=TRUE',
+ '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix,
+ '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
+ '-DCMAKE_MODULE_PATH=%s' % join_path(
+ prefix.share, 'KrellInstitute', 'cmake')])
+
+ # Add in the standard cmake arguments
+ cmakeOptions.extend(std_cmake_args)
+
+ # Adjust the standard cmake arguments to what we want the build
+ # type, etc to be
+ self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+ # Invoke cmake
+ cmake('..', *cmakeOptions)
+
+ make("clean")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/ccache/package.py b/var/spack/repos/builtin/packages/ccache/package.py
new file mode 100644
index 0000000000..69b590629e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ccache/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ccache(AutotoolsPackage):
+ """ccache is a compiler cache. It speeds up recompilation by caching
+ previous compilations and detecting when the same compilation is being done
+ again."""
+
+ homepage = "https://ccache.samba.org/"
+ url = "https://www.samba.org/ftp/ccache/ccache-3.3.3.tar.gz"
+
+ version('3.3.3', 'ea1f95303749b9ac136c617d1b333eef')
+ version('3.3.2', 'b966d56603e1fad2bac22930e5f01830')
+ version('3.3.1', '7102ef024cff09d353b3f4c48379b40b')
+ version('3.3',   'b7ac8fdd556f93831618483325fbb1ef')
+ version('3.2.9', '8f3f6e15e75a0e6020166927d41bd0b3')
+
+ depends_on('gperf')
+ depends_on('libxslt')
+ depends_on('zlib')
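
ccache is declared as an AutotoolsPackage and defines no install() of its own, relying on the base class's default configure/build/install phases. When an autotools package does need custom ./configure flags, the usual hook is configure_args(); a hypothetical sketch (package name, URL, checksum and variant are made up):

from spack import *


class Foo(AutotoolsPackage):
    """Hypothetical autotools package showing the configure_args hook."""

    homepage = "https://example.com/foo"
    url      = "https://example.com/foo-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder md5

    variant('zlib', default=True, description='Enable zlib support')

    depends_on('zlib', when='+zlib')

    def configure_args(self):
        # The base class already passes --prefix; only extra flags go here.
        args = []
        if '+zlib' in self.spec:
            args.append('--with-zlib=%s' % self.spec['zlib'].prefix)
        else:
            args.append('--without-zlib')
        return args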
diff --git a/var/spack/repos/builtin/packages/cdd/Makefile.spack.patch b/var/spack/repos/builtin/packages/cdd/Makefile.spack.patch
new file mode 100644
index 0000000000..4c97187a57
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cdd/Makefile.spack.patch
@@ -0,0 +1,22 @@
+--- old/Makefile.spack
++++ new/Makefile.spack
+@@ -0,0 +1,19 @@
++# Set PREFIX to the install location for both building and installing
++
++all: cdd dplex_test
++
++cdd: cdd.lo cddio.lo cddarith.lo dplex.lo setoper.lo
++ libtool --mode=link --tag=CC cc -g -O2 -o $@ $^
++
++dplex_test: dplex.lo dplex_test.lo setoper.lo
++ libtool --mode=link --tag=CC cc -g -O2 -o $@ $^
++
++%.lo: %.c
++ libtool --mode=compile --tag=CC cc -g -O2 -c $*.c
++
++install:
++ mkdir -p $(PREFIX)/bin
++ libtool --mode=install cp cdd $(PREFIX)/bin/cdd
++ libtool --mode=install cp dplex_test $(PREFIX)/bin/dplex_test
++
++.PHONY: all install
diff --git a/var/spack/repos/builtin/packages/cdd/package.py b/var/spack/repos/builtin/packages/cdd/package.py
new file mode 100644
index 0000000000..4a0a0aefef
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cdd/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Cdd(Package):
+ """The program cdd+ (cdd, respectively) is a C++ (ANSI C)
+ implementation of the Double Description Method [MRTT53] for
+ generating all vertices (i.e. extreme points) and extreme rays of
+ a general convex polyhedron given by a system of linear
+ inequalities"""
+ homepage = "https://www.inf.ethz.ch/personal/fukudak/cdd_home/cdd.html"
+ url = "ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cdd-061a.tar.gz"
+
+ version('0.61a', '22c24a7a9349dd7ec0e24531925a02d9')
+
+ depends_on("libtool", type="build")
+
+ patch("Makefile.spack.patch")
+
+ def url_for_version(self, version):
+ url = "ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cdd-{0}.tar.gz"
+ return url.format(version.joined)
+
+ def install(self, spec, prefix):
+ # The Makefile isn't portable; use our own instead
+ makeargs = ["-f", "Makefile.spack", "PREFIX=%s" % prefix]
+ make(*makeargs)
+ make("install", *makeargs)
diff --git a/var/spack/repos/builtin/packages/cddlib/package.py b/var/spack/repos/builtin/packages/cddlib/package.py
new file mode 100644
index 0000000000..ced5f46d1f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cddlib/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Cddlib(Package):
+ """The C-library cddlib is a C implementation of the Double Description
+ Method of Motzkin et al. for generating all vertices (i.e. extreme points)
+ and extreme rays of a general convex polyhedron in R^d given by a system
+ of linear inequalities"""
+ homepage = "https://www.inf.ethz.ch/personal/fukudak/cdd_home/"
+ # This is the original download url. It is currently down [2016-08-23],
+ # but should be reinstated or updated once the issue is resolved.
+ # url = "ftp://ftp.ifor.math.ethz.ch/pub/fukuda/cdd/cddlib-094h.tar.gz"
+ url = "http://pkgs.fedoraproject.org/lookaside/pkgs/cddlib/cddlib-094h.tar.gz/1467d270860bbcb26d3ebae424690e7c/cddlib-094h.tar.gz"
+
+ def url_for_version(self, version):
+ # Since the commit id is part of the version, we can't
+ # auto-generate the string, and we need to explicitly list all
+ # known versions here. Currently, there is only one version.
+ if str(version) == '0.94h':
+ return "http://pkgs.fedoraproject.org/lookaside/pkgs/cddlib/cddlib-094h.tar.gz/1467d270860bbcb26d3ebae424690e7c/cddlib-094h.tar.gz"
+ raise InstallError("Unsupported version %s" % str(version))
+
+ version('0.94h', '1467d270860bbcb26d3ebae424690e7c')
+
+ # Note: It should be possible to build cddlib also without gmp
+
+ depends_on("gmp")
+ depends_on("libtool", type="build")
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/cdo/package.py b/var/spack/repos/builtin/packages/cdo/package.py
new file mode 100644
index 0000000000..775dc31cf3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cdo/package.py
@@ -0,0 +1,117 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Cdo(Package):
+ """CDO is a collection of command line Operators to manipulate and analyse
+ Climate and NWP model Data. """
+
+ homepage = "https://code.zmaw.de/projects/cdo"
+
+ version('1.7.2', 'f08e4ce8739a4f2b63fc81a24db3ee31', url='https://code.zmaw.de/attachments/download/12760/cdo-1.7.2.tar.gz')
+ version('1.6.9', 'bf0997bf20e812f35e10188a930e24e2', url='https://code.zmaw.de/attachments/download/10198/cdo-1.6.9.tar.gz')
+
+ variant('szip', default=True, description='Enable szip compression for GRIB1')
+ variant('hdf5', default=False, description='Enable HDF5 support')
+ variant('netcdf', default=True, description='Enable NetCDF support')
+ variant('udunits2', default=True, description='Enable UDUNITS2 support')
+ variant('grib', default=True, description='Enable GRIB_API support')
+ variant('libxml2', default=True, description='Enable libxml2 support')
+ variant('proj', default=True, description='Enable PROJ library for cartographic projections')
+ variant('curl', default=True, description='Enable curl support')
+ variant('fftw', default=True, description='Enable support for fftw3')
+ variant('magics', default=True, description='Enable Magics library support')
+
+ depends_on('szip', when='+szip')
+ depends_on('netcdf', when='+netcdf')
+ depends_on('hdf5+threadsafe', when='+hdf5')
+ depends_on('udunits2', when='+udunits2')
+ depends_on('grib-api', when='+grib')
+ depends_on('libxml2', when='+libxml2')
+ depends_on('proj', when='+proj')
+ depends_on('curl', when='+curl')
+ depends_on('fftw', when='+fftw')
+ depends_on('magics', when='+magics')
+
+ def install(self, spec, prefix):
+ config_args = ["--prefix=" + prefix,
+ "--enable-shared",
+ "--enable-static"]
+
+ if '+szip' in spec:
+ config_args.append('--with-szlib=' + spec['szip'].prefix)
+ else:
+ config_args.append('--without-szlib')
+
+ if '+hdf5' in spec:
+ config_args.append('--with-hdf5=' + spec['hdf5'].prefix)
+ else:
+ config_args.append('--without-hdf5')
+
+ if '+netcdf' in spec:
+ config_args.append('--with-netcdf=' + spec['netcdf'].prefix)
+ else:
+ config_args.append('--without-netcdf')
+
+ if '+udunits2' in spec:
+ config_args.append('--with-udunits2=' + spec['udunits2'].prefix)
+ else:
+ config_args.append('--without-udunits2')
+
+ if '+grib' in spec:
+ config_args.append('--with-grib_api=' + spec['grib-api'].prefix)
+ else:
+ config_args.append('--without-grib_api')
+
+ if '+libxml2' in spec:
+ config_args.append('--with-libxml2=' + spec['libxml2'].prefix)
+ else:
+ config_args.append('--without-libxml2')
+
+ if '+proj' in spec:
+ config_args.append('--with-proj=' + spec['proj'].prefix)
+ else:
+ config_args.append('--without-proj')
+
+ if '+curl' in spec:
+ config_args.append('--with-curl=' + spec['curl'].prefix)
+ else:
+ config_args.append('--without-curl')
+
+ if '+fftw' in spec:
+ config_args.append('--with-fftw3')
+ else:
+ config_args.append('--without-fftw3')
+
+ if '+magics' in spec:
+ config_args.append('--with-magics=' + spec['magics'].prefix)
+ else:
+ config_args.append('--without-magics')
+
+ configure(*config_args)
+
+ make()
+ make('install')
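
cdo's install() repeats the same --with-X=<prefix> / --without-X decision for ten variants. The mapping can also be generated by a small helper, as in the hypothetical sketch below; the variant names and configure switches are taken from the recipe, but the helper itself is not part of it.

def _with_or_without(spec, variant, flag, dep=None):
    """Return '--with-<flag>=<prefix>' when the variant is enabled and
    '--without-<flag>' otherwise.  Illustrative helper only."""
    dep = dep or variant
    if '+' + variant in spec:
        return '--with-{0}={1}'.format(flag, spec[dep].prefix)
    return '--without-{0}'.format(flag)


# Mirroring a few of the cases from the recipe:
#   config_args.append(_with_or_without(spec, 'szip', 'szlib'))
#   config_args.append(_with_or_without(spec, 'netcdf', 'netcdf'))
#   config_args.append(_with_or_without(spec, 'grib', 'grib_api',
#                                       dep='grib-api'))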
diff --git a/var/spack/repos/builtin/packages/cereal/package.py b/var/spack/repos/builtin/packages/cereal/package.py
index c53c667efb..0ce6ec593c 100644
--- a/var/spack/repos/builtin/packages/cereal/package.py
+++ b/var/spack/repos/builtin/packages/cereal/package.py
@@ -26,11 +26,21 @@ from spack import *
import os
import shutil
+
class Cereal(Package):
- """cereal is a header-only C++11 serialization library. cereal takes arbitrary data types and reversibly turns them into different representations, such as compact binary encodings, XML, or JSON. cereal was designed to be fast, light-weight, and easy to extend - it has no external dependencies and can be easily bundled with other code or used standalone."""
+ """cereal is a header-only C++11 serialization library. cereal takes
+ arbitrary data types and reversibly turns them into different
+ representations, such as compact binary encodings, XML, or
+ JSON. cereal was designed to be fast, light-weight, and easy to
+ extend - it has no external dependencies and can be easily bundled
+ with other code or used standalone.
+
+ """
homepage = "http://uscilab.github.io/cereal/"
url = "https://github.com/USCiLab/cereal/archive/v1.1.2.tar.gz"
+ version('1.2.1', '64476ed74c19068ee543b53ad3992261')
+ version('1.2.0', 'e372c9814696481dbdb7d500e1410d2b')
version('1.1.2', '34d4ad174acbff005c36d4d10e48cbb9')
version('1.1.1', '0ceff308c38f37d5b5f6df3927451c27')
version('1.1.0', '9f2d5f72e935c54f4c6d23e954ce699f')
@@ -39,7 +49,7 @@ class Cereal(Package):
patch("Werror.patch")
- depends_on("cmake @2.6.2:")
+ depends_on('cmake@2.6.2:', type='build')
def install(self, spec, prefix):
# Don't use -Werror
diff --git a/var/spack/repos/builtin/packages/cfitsio/package.py b/var/spack/repos/builtin/packages/cfitsio/package.py
index ed49ae5808..79af31ae21 100644
--- a/var/spack/repos/builtin/packages/cfitsio/package.py
+++ b/var/spack/repos/builtin/packages/cfitsio/package.py
@@ -24,17 +24,19 @@
##############################################################################
from spack import *
+
class Cfitsio(Package):
- """
- CFITSIO is a library of C and Fortran subroutines for reading and writing
+ """CFITSIO is a library of C and Fortran subroutines for reading and writing
data files in FITS (Flexible Image Transport System) data format.
"""
+
homepage = 'http://heasarc.gsfc.nasa.gov/fitsio/'
+
version('3.370', 'abebd2d02ba5b0503c633581e3bfa116')
- def url_for_version(self, v):
+ def url_for_version(self, version):
url = 'ftp://heasarc.gsfc.nasa.gov/software/fitsio/c/cfitsio{0}.tar.gz'
- return url.format(str(v).replace('.', ''))
+ return url.format(version.joined)
def install(self, spec, prefix):
configure('--prefix=' + prefix)
diff --git a/var/spack/repos/builtin/packages/cgal/package.py b/var/spack/repos/builtin/packages/cgal/package.py
index ea1b20e34f..a16572246b 100644
--- a/var/spack/repos/builtin/packages/cgal/package.py
+++ b/var/spack/repos/builtin/packages/cgal/package.py
@@ -22,53 +22,87 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
class Cgal(Package):
- """
- CGAL is a software project that provides easy access to efficient and reliable geometric algorithms in the form of
- a C++ library. CGAL is used in various areas needing geometric computation, such as geographic information systems,
- computer aided design, molecular biology, medical imaging, computer graphics, and robotics.
+ """The Computational Geometry Algorithms Library (CGAL) is a C++ library
+ that aims to provide easy access to efficient and reliable algorithms in
+ computational geometry. CGAL is used in various areas needing geometric
+ computation, such as geographic information systems, computer aided design,
+ molecular biology, medical imaging, computer graphics, and robotics.
"""
homepage = 'http://www.cgal.org/'
url = 'https://github.com/CGAL/cgal/archive/releases/CGAL-4.7.tar.gz'
+ version('4.9', '7b628db3e5614347f776c046b7666089')
version('4.7', '4826714810f3b4c65cac96b90fb03b67')
version('4.6.3', 'e8ee2ecc8d2b09b94a121c09257b576d')
- # Installation instructions : http://doc.cgal.org/latest/Manual/installation.html
- variant('shared', default=True, description='Enables the build of shared libraries')
- variant('debug', default=False, description='Builds a debug version of the libraries')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
- depends_on('boost')
- depends_on('mpfr')
+ # ---- See "7 CGAL Libraries" at:
+ # http://doc.cgal.org/latest/Manual/installation.html
+
+ # The CORE library provides exact arithmetic for geometric computations.
+ # See: http://cs.nyu.edu/exact/core_pages/
+ # http://cs.nyu.edu/exact/core_pages/svn-core.html
+ variant('core', default=False,
+ description='Build the CORE library for algebraic numbers')
+ variant('imageio', default=False,
+ description='Build utilities to read/write image files')
+ variant('demos', default=False,
+ description='Build CGAL demos')
+
+ # Essential Third Party Libraries
+ depends_on('boost+thread+system')
depends_on('gmp')
+ depends_on('mpfr')
+
+ # Required for CGAL_ImageIO
+ # depends_on('opengl', when='+imageio') # not yet in Spack
depends_on('zlib')
- depends_on('cmake')
- # FIXME : Qt5 dependency missing (needs Qt5 and OpenGL)
- # FIXME : Optional third party libraries missing
+ # Optional to build CGAL_Qt5 (demos)
+ # depends_on('opengl', when='+demos') # not yet in Spack
+ depends_on('qt@5:', when='+demos')
- def install(self, spec, prefix):
+ # Optional Third Party Libraries
+ # depends_on('leda')
+ # depends_on('mpfi')
+ # depends_on('rs')
+ # depends_on('rs3')
+ # depends_on('ntl')
+ # depends_on('eigen')
+ # depends_on('libqglviewer')
+ # depends_on('esbtl')
+ # depends_on('intel-tbb')
- options = []
- options.extend(std_cmake_args)
- # CGAL supports only Release and Debug build type. Any other build type will raise an error at configure time
- if '+debug' in spec:
- options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
- else:
- options.append('-DCMAKE_BUILD_TYPE:STRING=Release')
+ # Build dependencies
+ depends_on('cmake', type='build')
+
+ def install(self, spec, prefix):
+ # Installation instructions:
+ # http://doc.cgal.org/latest/Manual/installation.html
- if '+shared' in spec:
- options.append('-DBUILD_SHARED_LIBS:BOOL=ON')
- else:
- options.append('-DBUILD_SHARED_LIBS:BOOL=OFF')
+ options = std_cmake_args + [
+            # CGAL supports only Release and Debug build types. Any
+            # other build type will raise an error at configure time
+ '-DCMAKE_BUILD_TYPE:STRING=%s' %
+ ('Debug' if '+debug' in spec else 'Release'),
+ '-DBUILD_SHARED_LIBS:BOOL=%s' %
+ ('ON' if '+shared' in spec else 'OFF'),
+ '-DWITH_CGAL_Core:BOOL=%s' %
+ ('YES' if '+core' in spec else 'NO'),
+ '-DWITH_CGAL_ImageIO:BOOL=%s' %
+ ('YES' if '+imageio' in spec else 'NO'),
+ '-DWITH_CGAL_Qt5:BOOL=%s' %
+ ('YES' if '+demos' in spec else 'NO')]
- build_directory = join_path(self.stage.path, 'spack-build')
- source_directory = self.stage.source_path
- with working_dir(build_directory, create=True):
- cmake(source_directory, *options)
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
make()
- make("install")
+ make('install')
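
As a quick illustration of the new single-expression flag construction, a standalone sketch of the flags produced for a hypothetical spec that carries +shared and none of the other variants:

# Sketch only: a plain set stands in for the real Spec object
# ('+shared' enabled, everything else off).
spec = {'+shared'}
options = [
    '-DCMAKE_BUILD_TYPE:STRING=%s' %
    ('Debug' if '+debug' in spec else 'Release'),
    '-DBUILD_SHARED_LIBS:BOOL=%s' %
    ('ON' if '+shared' in spec else 'OFF'),
    '-DWITH_CGAL_Core:BOOL=%s' %
    ('YES' if '+core' in spec else 'NO')]
print(options)
# -> ['-DCMAKE_BUILD_TYPE:STRING=Release',
#     '-DBUILD_SHARED_LIBS:BOOL=ON', '-DWITH_CGAL_Core:BOOL=NO']
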
diff --git a/var/spack/repos/builtin/packages/cgm/package.py b/var/spack/repos/builtin/packages/cgm/package.py
index c5da72d25a..5a998d471c 100644
--- a/var/spack/repos/builtin/packages/cgm/package.py
+++ b/var/spack/repos/builtin/packages/cgm/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Cgm(Package):
"""The Common Geometry Module, Argonne (CGMA) is a code library
which provides geometry functionality used for mesh generation and
@@ -33,7 +34,7 @@ class Cgm(Package):
version('13.1.1', '4e8dbc4ba8f65767b29f985f7a23b01f')
version('13.1.0', 'a6c7b22660f164ce893fb974f9cb2028')
- version('13.1' , '95f724bda04919fc76818a5b7bc0b4ed')
+ version('13.1', '95f724bda04919fc76818a5b7bc0b4ed')
depends_on("mpi")
@@ -42,7 +43,6 @@ class Cgm(Package):
'//\1',
'geom/parallel/CGMReadParallel.cpp')
-
def install(self, spec, prefix):
configure("--with-mpi",
"--prefix=%s" % prefix,
diff --git a/var/spack/repos/builtin/packages/cgns/package.py b/var/spack/repos/builtin/packages/cgns/package.py
new file mode 100644
index 0000000000..ba3fd7f821
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cgns/package.py
@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Cgns(Package):
+ """The CFD General Notation System (CGNS) provides a general, portable,
+ and extensible standard for the storage and retrieval of computational
+ fluid dynamics (CFD) analysis data."""
+
+ homepage = "http://cgns.github.io/"
+ url = "https://github.com/CGNS/CGNS/archive/v3.3.0.tar.gz"
+
+ version('3.3.0', '64e5e8d97144c1462bee9ea6b2a81d7f')
+
+ variant('hdf5', default=True, description='Enable HDF5 interface')
+
+ depends_on('cmake', type='build')
+ depends_on('hdf5', when='+hdf5')
+
+ def install(self, spec, prefix):
+ cmake_args = std_cmake_args[:]
+
+ if self.compiler.f77 and self.compiler.fc:
+ cmake_args.append('-DCGNS_ENABLE_FORTRAN=ON')
+ else:
+ cmake_args.append('-DCGNS_ENABLE_FORTRAN=OFF')
+
+ if '+hdf5' in spec:
+ cmake_args.extend([
+ '-DCGNS_ENABLE_HDF5=ON',
+ '-DHDF5_NEEDS_ZLIB=ON'
+ ])
+
+ if spec.satisfies('^hdf5+mpi'):
+ cmake_args.append('-DHDF5_NEEDS_MPI=ON')
+ else:
+ cmake_args.append('-DHDF5_NEEDS_MPI=OFF')
+
+ if spec.satisfies('^hdf5+szip'):
+ cmake_args.append('-DHDF5_NEEDS_SZIP=ON')
+ else:
+ cmake_args.append('-DHDF5_NEEDS_SZIP=OFF')
+ else:
+ cmake_args.append('-DCGNS_ENABLE_HDF5=OFF')
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *cmake_args)
+
+ make()
+ make('install')
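
To make the conditional HDF5 wiring easier to follow, a standalone sketch of the flags the install step ends up passing for a hypothetical +hdf5 build whose hdf5 dependency has +mpi but not +szip:

# Sketch only: a dict stands in for the spec.satisfies('^hdf5+...') checks.
hdf5_has = {'mpi': True, 'szip': False}
cmake_args = ['-DCGNS_ENABLE_HDF5=ON', '-DHDF5_NEEDS_ZLIB=ON']
cmake_args.append('-DHDF5_NEEDS_MPI=ON' if hdf5_has['mpi']
                  else '-DHDF5_NEEDS_MPI=OFF')
cmake_args.append('-DHDF5_NEEDS_SZIP=ON' if hdf5_has['szip']
                  else '-DHDF5_NEEDS_SZIP=OFF')
print(cmake_args)
# -> ['-DCGNS_ENABLE_HDF5=ON', '-DHDF5_NEEDS_ZLIB=ON',
#     '-DHDF5_NEEDS_MPI=ON', '-DHDF5_NEEDS_SZIP=OFF']
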
diff --git a/var/spack/repos/builtin/packages/charm/mpi.patch b/var/spack/repos/builtin/packages/charm/mpi.patch
new file mode 100644
index 0000000000..e909d5f876
--- /dev/null
+++ b/var/spack/repos/builtin/packages/charm/mpi.patch
@@ -0,0 +1,19 @@
+--- old/src/scripts/configure
++++ new/src/scripts/configure
+@@ -3293,10 +3293,16 @@
+ test_link "whether -lmpi" "ok" "no" "-lmpi"
+ if test $pass -eq 1
+ then
+ add_flag CMK_SYSLIBS='"$CMK_SYSLIBS -lmpi"' "mpi lib"
+ else
++ test_link "whether -lmpi -lmpi_cxx" "ok" "no" "-lmpi -lmpi_cxx"
++ if test $pass -eq 1
++ then
++ add_flag CMK_SYSLIBS='"$CMK_SYSLIBS -lmpi -lmpi_cxx"' "mpi lib"
++ else
+ echo "Error: can not find mpi library"
+ test_finish 1
++ fi
+ fi
+ fi
+ else
diff --git a/var/spack/repos/builtin/packages/charm/package.py b/var/spack/repos/builtin/packages/charm/package.py
new file mode 100644
index 0000000000..7072e37cd3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/charm/package.py
@@ -0,0 +1,190 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import os
+import platform
+import shutil
+import sys
+from spack import *
+
+
+class Charm(Package):
+ """Charm++ is a parallel programming framework in C++ supported by
+ an adaptive runtime system, which enhances user productivity and
+ allows programs to run portably from small multicore computers
+ (your laptop) to the largest supercomputers."""
+
+ homepage = "http://charmplusplus.org"
+ url = "http://charm.cs.illinois.edu/distrib/charm-6.7.1.tar.gz"
+
+ version("6.7.1", "a8e20cf85e9c8721158f5bbd0ade48d9")
+ version("6.7.0", "35a39a7975f1954a7db2d76736158231")
+ version("6.6.1", "9554230f741e2599deaaac4d9d93d7ab")
+ version("6.6.0", "31e95901b3f7324d52107e6ad000fcc8")
+ version("6.5.1", "034d99458474a3ab96d8bede8a691a5d")
+
+ # Support OpenMPI; see
+ # <https://charm.cs.illinois.edu/redmine/issues/1206>
+ patch("mpi.patch")
+ # Ignore compiler warnings while configuring
+ patch("strictpass.patch")
+
+ # Communication mechanisms (choose exactly one)
+ # TODO: Support Blue Gene/Q PAMI, Cray GNI, Cray shmem, CUDA
+ variant("mpi", default=True,
+ description="Use MPI as communication mechanism")
+ variant("multicore", default=False,
+ description="Disable inter-node communication")
+ variant("net", default=False,
+ description="Use net communication mechanism")
+ variant("netlrts", default=True,
+ description="Use netlrts communication mechanism")
+ variant("verbs", default=False,
+ description="Use Infiniband as communication mechanism")
+
+ # Other options
+ # Something is off with PAPI -- there are build errors. Maybe
+ # Charm++ expects a particular version?
+ variant("papi", default=False, description="Enable PAPI integration")
+ variant("smp", default=True,
+ description=(
+ "Enable SMP parallelism (does not work with +multicore)"))
+ variant("tcp", default=False,
+ description="Use TCP as transport mechanism (requires +net)")
+
+ # Note: We could add variants for AMPI, LIBS, bigemulator, msa, Tau
+ # Note: We could support shared libraries
+
+ depends_on("mpi", when="+mpi")
+ depends_on("papi", when="+papi")
+
+ def install(self, spec, prefix):
+ target = "charm++"
+
+ # Note: Turn this into a multi-valued variant, once these
+ # exist in Spack
+ if sum(["+mpi" in spec,
+ "+multicore" in spec,
+ "+net" in spec,
+ "+netlrts" in spec,
+ "+verbs" in spec]) != 1:
+ raise InstallError(
+ "Exactly one communication mechanism "
+ "(+mpi, +multicore, +net, +netlrts, or +verbs) "
+ "must be enabled")
+ if "+mpi" in spec:
+ comm = "mpi"
+ if "+multicore" in spec:
+ comm = "multicore"
+ if "+net" in spec:
+ comm = "net"
+ if "+netlrts" in spec:
+ comm = "netlrts"
+ if "+verbs" in spec:
+ comm = "verbs"
+
+ plat = sys.platform
+ if plat.startswith("linux"):
+ plat = "linux"
+ mach = platform.machine()
+
+ # Define Charm++ version names for various (plat, mach, comm)
+ # combinations. Note that not all combinations are supported.
+ versions = {
+ ("darwin", "i386", "multicore"): "multicore-darwin-x86",
+ ("darwin", "i386", "net"): "net-darwin-x86",
+ ("darwin", "x86_64", "mpi"): "mpi-darwin-x86_64",
+ ("darwin", "x86_64", "multicore"): "multicore-darwin-x86_64",
+ ("darwin", "x86_64", "net"): "net-darwin-x86_64",
+ ("darwin", "x86_64", "netlrts"): "netlrts-darwin-x86_64",
+ ("linux", "i386", "mpi"): "mpi-linux",
+ ("linux", "i386", "multicore"): "multicore-linux32",
+ ("linux", "i386", "net"): "net-linux",
+ ("linux", "i386", "netlrts"): "netlrts-linux",
+ ("linux", "x86_64", "mpi"): "mpi-linux-x86_64",
+ ("linux", "x86_64", "multicore"): "multicore-linux64",
+ ("linux", "x86_64", "net"): "net-linux-x86_64",
+ ("linux", "x86_64", "netlrts"): "netlrts-linux-x86_64",
+ ("linux", "x86_64", "verbs"): "verbs-linux-x86_64",
+ }
+ if (plat, mach, comm) not in versions:
+ raise InstallError(
+ "The communication mechanism %s is not supported "
+ "on a %s platform with a %s CPU" %
+ (comm, plat, mach))
+ version = versions[(plat, mach, comm)]
+
+ # We assume that Spack's compiler wrappers make this work. If
+ # not, then we need to query the compiler vendor from Spack
+ # here.
+ compiler = "gcc"
+
+ options = [compiler,
+ "--with-production", # Note: turn this into a variant
+ "-j%d" % make_jobs,
+ "--destination=%s" % prefix]
+ if "+mpi" in spec:
+ options.append("--basedir=%s" % spec["mpi"].prefix)
+ if "+papi" in spec:
+ options.extend(["papi", "--basedir=%s" % spec["papi"].prefix])
+ if "+smp" in spec:
+ if "+multicore" in spec:
+ # This is a Charm++ limitation; it would lead to a
+ # build error
+ raise InstallError("Cannot combine +smp with +multicore")
+ options.append("smp")
+ if "+tcp" in spec:
+ if "+net" not in spec:
+ # This is a Charm++ limitation; it would lead to a
+ # build error
+ raise InstallError(
+ "The +tcp variant requires "
+ "the +net communication mechanism")
+ options.append("tcp")
+
+ # Call "make" via the build script
+ # Note: This builds Charm++ in the "tmp" subdirectory of the
+ # install directory. Maybe we could set up a symbolic link
+ # back to the build tree to prevent this? Alternatively, we
+ # could dissect the build script; the build instructions say
+ # this wouldn't be difficult.
+ build = Executable(join_path(".", "build"))
+ build(target, version, *options)
+
+ # Charm++'s install script does not copy files, it only creates
+ # symbolic links. Fix this.
+ for dirpath, dirnames, filenames in os.walk(prefix):
+ for filename in filenames:
+ filepath = join_path(dirpath, filename)
+ if os.path.islink(filepath):
+ tmppath = filepath + ".tmp"
+ # Skip dangling symbolic links
+ try:
+ shutil.copy2(filepath, tmppath)
+ os.remove(filepath)
+ os.rename(tmppath, filepath)
+ except:
+ pass
+ shutil.rmtree(join_path(prefix, "tmp"))
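
The (plat, mach, comm) table above drives the whole build-name selection; a standalone sketch of the lookup for a hypothetical Linux x86_64 machine with the +mpi mechanism selected:

# Sketch only: an excerpt of the table above, queried for one combination.
versions = {("linux", "x86_64", "mpi"): "mpi-linux-x86_64"}
plat, mach, comm = "linux", "x86_64", "mpi"
print(versions[(plat, mach, comm)])
# -> mpi-linux-x86_64
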
diff --git a/var/spack/repos/builtin/packages/charm/strictpass.patch b/var/spack/repos/builtin/packages/charm/strictpass.patch
new file mode 100644
index 0000000000..44aa4fbd38
--- /dev/null
+++ b/var/spack/repos/builtin/packages/charm/strictpass.patch
@@ -0,0 +1,16 @@
+--- old/src/scripts/configure
++++ new/src/scripts/configure
+@@ -2146,13 +2146,6 @@
+ test_result $? "$1" "$2" "$3"
+ strictpass=$pass
+ strictfail=$fail
+- if test $pass -eq 1
+- then
+- if cat out | grep -i "warn" > /dev/null 2>&1
+- then
+- strictpass="0" && strictfail="1"
+- fi
+- fi
+ cat out >> $charmout
+ /bin/rm -f out
+ }
diff --git a/var/spack/repos/builtin/packages/cityhash/package.py b/var/spack/repos/builtin/packages/cityhash/package.py
index caa15780e3..85d948cc57 100644
--- a/var/spack/repos/builtin/packages/cityhash/package.py
+++ b/var/spack/repos/builtin/packages/cityhash/package.py
@@ -25,16 +25,20 @@
from spack import *
from spack.util.environment import *
+
class Cityhash(Package):
+ """CityHash, a family of hash functions for strings."""
+
homepage = "https://github.com/google/cityhash"
url = "https://github.com/google/cityhash"
- version('2013-07-31', git='https://github.com/google/cityhash.git', commit='8af9b8c2b889d80c22d6bc26ba0df1afb79a30db')
- version('master', branch='master', git='https://github.com/google/cityhash.git')
+ version('2013-07-31', git='https://github.com/google/cityhash.git',
+ commit='8af9b8c2b889d80c22d6bc26ba0df1afb79a30db')
+ version('master', branch='master',
+ git='https://github.com/google/cityhash.git')
def install(self, spec, prefix):
configure('--enable-sse4.2', '--prefix=%s' % prefix)
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/cleverleaf/package.py b/var/spack/repos/builtin/packages/cleverleaf/package.py
index 6b54d1bf26..3bd1f0b5d0 100644
--- a/var/spack/repos/builtin/packages/cleverleaf/package.py
+++ b/var/spack/repos/builtin/packages/cleverleaf/package.py
@@ -24,22 +24,26 @@
##############################################################################
from spack import *
+
class Cleverleaf(Package):
- """
- CleverLeaf is a hydrodynamics mini-app that extends CloverLeaf with Adaptive
- Mesh Refinement using the SAMRAI toolkit from Lawrence Livermore National
- Laboratory. The primary goal of CleverLeaf is to evaluate the application of
- AMR to the Lagrangian-Eulerian hydrodynamics scheme used by CloverLeaf.
+ """CleverLeaf is a hydrodynamics mini-app that extends CloverLeaf with
+ Adaptive Mesh Refinement using the SAMRAI toolkit from Lawrence
+ Livermore National Laboratory. The primary goal of CleverLeaf is
+ to evaluate the application of AMR to the Lagrangian-Eulerian
+ hydrodynamics scheme used by CloverLeaf.
+
"""
homepage = "http://uk-mac.github.io/CleverLeaf/"
url = "https://github.com/UK-MAC/CleverLeaf/tarball/master"
- version('develop', git='https://github.com/UK-MAC/CleverLeaf_ref.git', branch='develop')
+ version('develop', git='https://github.com/UK-MAC/CleverLeaf_ref.git',
+ branch='develop')
- depends_on("SAMRAI@3.8.0:")
+ depends_on("samrai@3.8.0:")
depends_on("hdf5+mpi")
depends_on("boost")
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(*std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/clhep/darwin/CLHEP.patch b/var/spack/repos/builtin/packages/clhep/darwin/CLHEP.patch
new file mode 100644
index 0000000000..ca1d96574a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/clhep/darwin/CLHEP.patch
@@ -0,0 +1,11 @@
+--- CLHEP/CMakeLists.txt 2016-06-20 14:41:12.000000000 -0500
++++ CLHEP/CMakeLists.txt 2016-06-20 14:40:57.000000000 -0500
+@@ -37,7 +37,7 @@
+ # If Policy CMP0042 exists, use OLD to prefer the use of install names
+ # instead of the new @rpath default.
+ if(POLICY CMP0042)
+- cmake_policy(SET CMP0042 NEW)
++ cmake_policy(SET CMP0042 OLD)
+ endif()
+
+ set(CMAKE_MODULE_PATH
diff --git a/var/spack/repos/builtin/packages/clhep/package.py b/var/spack/repos/builtin/packages/clhep/package.py
new file mode 100644
index 0000000000..02a9da9e27
--- /dev/null
+++ b/var/spack/repos/builtin/packages/clhep/package.py
@@ -0,0 +1,81 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Clhep(Package):
+ """CLHEP is a C++ Class Library for High Energy Physics. """
+ homepage = "http://proj-clhep.web.cern.ch/proj-clhep/"
+ url = "http://proj-clhep.web.cern.ch/proj-clhep/DISTRIBUTION/tarFiles/clhep-2.2.0.5.tgz"
+ list_url = "https://proj-clhep.web.cern.ch/proj-clhep/DISTRIBUTION/"
+
+ version('2.3.2.2', '567b304b0fa017e1e9fbf199f456ebe9')
+ version('2.3.1.1', '16efca7641bc118c9d217cc96fe90bf5')
+ version('2.3.1.0', 'b084934fc26a4182a08c09c292e19161')
+ version('2.3.0.0', 'a00399a2ca867f2be902c22fc71d7e2e')
+ version('2.2.0.8', '5a23ed3af785ac100a25f6cb791846af')
+ version('2.2.0.5', '1584e8ce6ebf395821aed377df315c7c')
+ version('2.2.0.4', '71d2c7c2e39d86a0262e555148de01c1')
+
+ variant('debug', default=False, description="Switch to the debug version of CLHEP.")
+ variant('cxx11', default=True, description="Compile using c++11 dialect.")
+ variant('cxx14', default=False, description="Compile using c++14 dialect.")
+
+ depends_on('cmake@2.8.12.2:', when='@2.2.0.4:2.3.0.0', type='build')
+ depends_on('cmake@3.2:', when='@2.3.0.1:', type='build')
+
+ def patch(self):
+ filter_file('SET CMP0042 OLD',
+ 'SET CMP0042 NEW',
+ '%s/%s/CLHEP/CMakeLists.txt'
+ % (self.stage.path, self.spec.version))
+
+ def install(self, spec, prefix):
+ # Handle debug
+ # Pull out the BUILD_TYPE so we can change it (Release is default)
+ cmake_args = [arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg]
+ build_type = 'Debug' if '+debug' in spec else 'MinSizeRel'
+ cmake_args.extend(['-DCMAKE_BUILD_TYPE=' + build_type])
+
+ if '+cxx11' in spec:
+ env['CXXFLAGS'] = self.compiler.cxx11_flag
+ cmake_args.append('-DCLHEP_BUILD_CXXSTD=' +
+ self.compiler.cxx11_flag)
+
+ if '+cxx14' in spec:
+ env['CXXFLAGS'] = self.compiler.cxx14_flag
+ cmake_args.append('-DCLHEP_BUILD_CXXSTD=' +
+ self.compiler.cxx14_flag)
+
+ # Note that the tar file is unusual in that there's a
+ # CLHEP directory (addtional layer)
+        # CLHEP directory (additional layer)
+
+ # Run cmake in a build directory
+ with working_dir('build', create=True):
+ cmake(*cmake_args)
+ make()
+ make("install")
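
The build-type handling above first strips Spack's default CMAKE_BUILD_TYPE and then pins its own; a standalone sketch with a hypothetical std_cmake_args list:

# Sketch only: the std_cmake_args contents here are hypothetical.
std_cmake_args = ['-DCMAKE_INSTALL_PREFIX=/illustrative/prefix',
                  '-DCMAKE_BUILD_TYPE=RelWithDebInfo']
cmake_args = [arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg]
cmake_args.append('-DCMAKE_BUILD_TYPE=MinSizeRel')  # Debug when '+debug'
print(cmake_args)
# -> ['-DCMAKE_INSTALL_PREFIX=/illustrative/prefix',
#     '-DCMAKE_BUILD_TYPE=MinSizeRel']
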
diff --git a/var/spack/repos/builtin/packages/cloog/package.py b/var/spack/repos/builtin/packages/cloog/package.py
index db3d2ac928..a979ae83fc 100644
--- a/var/spack/repos/builtin/packages/cloog/package.py
+++ b/var/spack/repos/builtin/packages/cloog/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Cloog(Package):
"""CLooG is a free software and library to generate code for
scanning Z-polyhedra. That is, it finds a code (e.g. in C,
diff --git a/var/spack/repos/builtin/packages/cmake/intel-c-gnu11.patch b/var/spack/repos/builtin/packages/cmake/intel-c-gnu11.patch
new file mode 100644
index 0000000000..afe6f871ec
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cmake/intel-c-gnu11.patch
@@ -0,0 +1,23 @@
+diff --git a/Modules/Compiler/Intel-C.cmake b/Modules/Compiler/Intel-C.cmake
+index eb9602a..edca154 100644
+--- a/Modules/Compiler/Intel-C.cmake
++++ b/Modules/Compiler/Intel-C.cmake
+@@ -16,14 +16,14 @@ endif()
+
+ if (NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 15.0.0)
+ set(CMAKE_C11_STANDARD_COMPILE_OPTION "${_std}=c11")
+- set(CMAKE_C11_EXTENSION_COMPILE_OPTION "${_std}=c11")
++ set(CMAKE_C11_EXTENSION_COMPILE_OPTION "${_std}=gnu11")
+ endif()
+
+-if (NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 12.1)
++if (NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 12.0)
+ set(CMAKE_C90_STANDARD_COMPILE_OPTION "${_std}=c89")
+- set(CMAKE_C90_EXTENSION_COMPILE_OPTION "${_std}=c89")
++ set(CMAKE_C90_EXTENSION_COMPILE_OPTION "${_std}=gnu89")
+ set(CMAKE_C99_STANDARD_COMPILE_OPTION "${_std}=c99")
+- set(CMAKE_C99_EXTENSION_COMPILE_OPTION "${_std}=c99")
++ set(CMAKE_C99_EXTENSION_COMPILE_OPTION "${_std}=gnu99")
+ endif()
+
+ if(NOT CMAKE_C_COMPILER_VERSION VERSION_LESS 12.1)
diff --git a/var/spack/repos/builtin/packages/cmake/package.py b/var/spack/repos/builtin/packages/cmake/package.py
index 7b2a125fe5..60c95b9184 100644
--- a/var/spack/repos/builtin/packages/cmake/package.py
+++ b/var/spack/repos/builtin/packages/cmake/package.py
@@ -24,35 +24,55 @@
##############################################################################
from spack import *
+
class Cmake(Package):
"""A cross-platform, open-source build system. CMake is a family of
tools designed to build, test and package software."""
- homepage = 'https://www.cmake.org'
- url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'
+ homepage = 'https://www.cmake.org'
+ url = 'https://cmake.org/files/v3.4/cmake-3.4.3.tar.gz'
+ list_url = 'https://cmake.org/files/'
+ list_depth = 2
+ version('3.7.1', 'd031d5a06e9f1c5367cdfc56fbd2a1c8')
+ version('3.6.1', 'd6dd661380adacdb12f41b926ec99545')
+ version('3.6.0', 'aa40fbecf49d99c083415c2411d12db9')
version('3.5.2', '701386a1b5ec95f8d1075ecf96383e02')
version('3.5.1', 'ca051f4a66375c89d1a524e726da0296')
version('3.5.0', '33c5d09d4c33d4ffcc63578a6ba8777e')
version('3.4.3', '4cb3ff35b2472aae70f542116d616e63')
version('3.4.0', 'cd3034e0a44256a0917e254167217fc8')
version('3.3.1', '52638576f4e1e621fed6c3410d3a1b12')
+ version('3.1.0', '188eb7dc9b1b82b363bc51c0d3f1d461')
version('3.0.2', 'db4c687a31444a929d2fdc36c4dfb95f')
version('2.8.10.2', '097278785da7182ec0aea8769d06860c')
- variant('ncurses', default=True, description='Enables the build of the ncurses gui')
- variant('openssl', default=True, description="Enables CMake's OpenSSL features")
- variant('qt', default=False, description='Enables the build of cmake-gui')
- variant('doc', default=False, description='Enables the generation of html and man page documentation')
+ variant('ownlibs', default=True, description='Use CMake-provided third-party libraries')
+ variant('qt', default=False, description='Enables the build of cmake-gui')
+ variant('doc', default=False, description='Enables the generation of html and man page documentation')
+ variant('openssl', default=True, description="Enables CMake's OpenSSL features")
+ variant('ncurses', default=True, description='Enables the build of the ncurses gui')
+
+ depends_on('curl', when='~ownlibs')
+ depends_on('expat', when='~ownlibs')
+ # depends_on('jsoncpp', when='~ownlibs') # circular dependency
+ depends_on('zlib', when='~ownlibs')
+ depends_on('bzip2', when='~ownlibs')
+ depends_on('xz', when='~ownlibs')
+ depends_on('libarchive', when='~ownlibs')
+ depends_on('qt', when='+qt')
+ depends_on('python@2.7.11:', when='+doc', type='build')
+ depends_on('py-sphinx', when='+doc', type='build')
+ depends_on('openssl', when='+openssl')
+ depends_on('ncurses', when='+ncurses')
- depends_on('ncurses', when='+ncurses')
- depends_on('openssl', when='+openssl')
- depends_on('qt', when='+qt')
- depends_on('python@2.7.11:', when='+doc')
- depends_on('py-sphinx', when='+doc')
+ # Cannot build with Intel, should be fixed in 3.6.2
+ # https://gitlab.kitware.com/cmake/cmake/issues/16226
+ patch('intel-c-gnu11.patch', when='@3.6.0:3.6.1')
def url_for_version(self, version):
"""Handle CMake's version-based custom URLs."""
- return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % (version.up_to(2), version)
+ return 'https://cmake.org/files/v%s/cmake-%s.tar.gz' % (
+ version.up_to(2), version)
def validate(self, spec):
"""
@@ -70,12 +90,27 @@ class Cmake(Package):
# Consistency check
self.validate(spec)
- # configure, build, install:
- options = ['--prefix=%s' % prefix]
- options.append('--parallel=%s' % str(make_jobs))
+ options = [
+ '--prefix={0}'.format(prefix),
+ '--parallel={0}'.format(make_jobs)]
+ if spec.satisfies("@3.2:"):
+ options.append(
+ # jsoncpp requires CMake to build
+ # use CMake-provided library to avoid circular dependency
+ '--no-system-jsoncpp'
+ )
+
+ if '+ownlibs' in spec:
+ # Build and link to the CMake-provided third-party libraries
+ options.append('--no-system-libs')
+ else:
+ # Build and link to the Spack-installed third-party libraries
+ options.append('--system-libs')
if '+qt' in spec:
options.append('--qt-gui')
+ else:
+ options.append('--no-qt-gui')
if '+doc' in spec:
options.append('--sphinx-html')
@@ -85,6 +120,10 @@ class Cmake(Package):
options.append('--')
options.append('-DCMAKE_USE_OPENSSL=ON')
- configure(*options)
+ bootstrap = Executable('./bootstrap')
+ bootstrap(*options)
+
make()
+ if self.run_tests:
+ make('test') # some tests fail, takes forever
make('install')
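
For reference, a standalone sketch of the version-based URL scheme that url_for_version handles above, assuming Version('3.7.1').up_to(2) yields '3.7':

# Sketch only: emulate url_for_version for CMake 3.7.1.
version, short = '3.7.1', '3.7'
print('https://cmake.org/files/v%s/cmake-%s.tar.gz' % (short, version))
# -> https://cmake.org/files/v3.7/cmake-3.7.1.tar.gz
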
diff --git a/var/spack/repos/builtin/packages/cmocka/package.py b/var/spack/repos/builtin/packages/cmocka/package.py
index 41f80d9761..274b78379a 100644
--- a/var/spack/repos/builtin/packages/cmocka/package.py
+++ b/var/spack/repos/builtin/packages/cmocka/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Cmocka(Package):
"""Unit-testing framework in pure C"""
homepage = "https://cmocka.org/"
@@ -32,9 +33,11 @@ class Cmocka(Package):
version('1.0.1', 'ed861e501a21a92b2af63e466df2015e')
parallel = False
+ depends_on('cmake', type='build')
+
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
- cmake('..', *std_cmake_args)
+ cmake('..', *std_cmake_args)
- make()
- make("install")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/cmor/package.py b/var/spack/repos/builtin/packages/cmor/package.py
new file mode 100644
index 0000000000..b5debf9537
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cmor/package.py
@@ -0,0 +1,72 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Cmor(AutotoolsPackage):
+ """Climate Model Output Rewriter is used to produce CF-compliant netCDF
+ files. The structure of the files created by the library and the metadata
+ they contain fulfill the requirements of many of the climate community's
+ standard model experiments."""
+
+ homepage = "http://cmor.llnl.gov"
+ url = "https://github.com/PCMDI/cmor/archive/3.1.2.tar.gz"
+
+ version('3.2.0', 'b48105105d4261012c19cd65e89ff7a6')
+ version('3.1.2', '72f7227159c901e4bcf80d2c73a8ce77')
+
+ variant('fortran', default=True, description='Enable Fortran API')
+ variant('python', default=False, description='Enable PYTHON support')
+
+ depends_on('uuid')
+ depends_on('netcdf')
+ depends_on('udunits2')
+ depends_on('hdf5@:1.8')
+
+ extends('python', when='+python')
+ depends_on('python@:2.7', when='+python')
+ depends_on('py-numpy', type=('build', 'run'), when='+python')
+
+ @AutotoolsPackage.precondition('configure')
+ def validate(self):
+ if '+fortran' in self.spec and not self.compiler.fc:
+ msg = 'cannot build a fortran variant without a fortran compiler'
+ raise RuntimeError(msg)
+
+ def configure_args(self):
+ extra_args = ['--disable-debug']
+
+ if '+fortran' in self.spec:
+ extra_args.append('--enable-fortran')
+ else:
+ extra_args.append('--disable-fortran')
+
+ return extra_args
+
+ def install(self, spec, prefix):
+ make('install')
+
+ if '+python' in spec:
+ setup_py('install', '--prefix=' + prefix)
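
The configure_args hook above only contributes the package-specific switches; a standalone sketch of the resulting configure invocation for a hypothetical +fortran build (the --prefix value is illustrative):

# Sketch only: the prefix path is illustrative, not taken from the diff.
extra_args = ['--disable-debug', '--enable-fortran']
print('./configure --prefix=/illustrative/prefix ' + ' '.join(extra_args))
# -> ./configure --prefix=/illustrative/prefix --disable-debug --enable-fortran
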
diff --git a/var/spack/repos/builtin/packages/cnmem/package.py b/var/spack/repos/builtin/packages/cnmem/package.py
index f4c05f5b5f..0c62023952 100644
--- a/var/spack/repos/builtin/packages/cnmem/package.py
+++ b/var/spack/repos/builtin/packages/cnmem/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Cnmem(Package):
"""CNMem mempool for CUDA devices"""
homepage = "https://github.com/NVIDIA/cnmem"
@@ -31,6 +32,6 @@ class Cnmem(Package):
version('git', git='https://github.com/NVIDIA/cnmem.git', branch="master")
def install(self, spec, prefix):
- cmake('.',*std_cmake_args)
- make()
- make('install')
+ cmake('.', *std_cmake_args)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/compiz/package.py b/var/spack/repos/builtin/packages/compiz/package.py
new file mode 100644
index 0000000000..ec21f5b4f2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/compiz/package.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Compiz(Package):
+ """compiz - OpenGL window and compositing manager.
+
+ Compiz is an OpenGL compositing manager that use
+ GLX_EXT_texture_from_pixmap for binding redirected top-level
+ windows to texture objects. It has a flexible plug-in system
+ and it is designed to run well on most graphics hardware."""
+
+ homepage = "http://www.compiz.org/"
+ url = "https://www.x.org/archive/individual/app/compiz-0.7.8.tar.gz"
+
+ version('0.7.8', 'e99977d9170a7bd5d571004eed038428')
+
+ depends_on('libxcb')
+ depends_on('libxcomposite')
+ depends_on('libxfixes')
+ depends_on('libxdamage')
+ depends_on('libxrandr')
+ depends_on('libxinerama')
+ depends_on('libice')
+ depends_on('libsm')
+ depends_on('libxml2')
+ depends_on('libxslt')
+
+ # TODO: add dependencies
+ # libstartup-notification-1.0 >= 0.7
+ depends_on('libxrender')
+ depends_on('libpng')
+ depends_on('glib')
+ depends_on('gconf')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/compositeproto/package.py b/var/spack/repos/builtin/packages/compositeproto/package.py
new file mode 100644
index 0000000000..1b3fbda0af
--- /dev/null
+++ b/var/spack/repos/builtin/packages/compositeproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Compositeproto(Package):
+ """Composite Extension.
+
+ This package contains header files and documentation for the composite
+ extension. Library and server implementations are separate."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/compositeproto"
+ url = "https://www.x.org/archive/individual/proto/compositeproto-0.4.2.tar.gz"
+
+ version('0.4.2', '2dea7c339432b3363faf2d29c208e7b5')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/conduit/package.py b/var/spack/repos/builtin/packages/conduit/package.py
new file mode 100644
index 0000000000..9715542126
--- /dev/null
+++ b/var/spack/repos/builtin/packages/conduit/package.py
@@ -0,0 +1,313 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+import socket
+import os
+
+import llnl.util.tty as tty
+
+
+def cmake_cache_entry(name, value):
+ """
+ Helper that creates CMake cache entry strings used in
+ 'host-config' files.
+ """
+ return 'set("{0}" "{1}" CACHE PATH "")\n\n'.format(name, value)
+
+
+class Conduit(Package):
+ """Conduit is an open source project from Lawrence Livermore National
+ Laboratory that provides an intuitive model for describing hierarchical
+ scientific data in C++, C, Fortran, and Python. It is used for data
+ coupling between packages in-core, serialization, and I/O tasks."""
+
+ homepage = "http://software.llnl.gov/conduit"
+ url = "https://github.com/LLNL/conduit/archive/v0.2.1.tar.gz"
+
+ version('0.2.1', 'cd2b42c76f70ac3546582b6da77c6028')
+ version('0.2.0', 'd595573dedf55514c11d7391092fd760')
+
+ version('master', git='https://github.com/LLNL/conduit.git')
+
+ ###########################################################################
+ # package variants
+ ###########################################################################
+
+ variant("shared", default=True, description="Build Conduit as shared libs")
+
+ variant("cmake", default=True,
+ description="Build CMake (if off, attempt to use cmake from PATH)")
+
+ # variants for python support
+ variant("python", default=True, description="Build Conduit Python support")
+
+ # variants for comm and i/o
+ variant("mpi", default=True, description="Build Conduit MPI Support")
+ variant("hdf5", default=True, description="Build Conduit HDF5 support")
+ variant("silo", default=True, description="Build Conduit Silo support")
+
+ # variants for dev-tools (docs, etc)
+ variant("doc", default=False, description="Build Conduit's documentation")
+
+ ###########################################################################
+ # package dependencies
+ ###########################################################################
+
+ #######################
+ # CMake
+ #######################
+ # cmake 3.3.1 is the version we tested
+ depends_on("cmake@3.3.1", when="+cmake")
+
+ #######################
+ # Python
+ #######################
+ extends("python", when="+python")
+ # TODO: blas and lapack are disabled due to build
+ # issues Cyrus experienced on OSX 10.11.6
+ depends_on("py-numpy~blas~lapack", when="+python")
+
+ #######################
+ # I/O Packages
+ #######################
+ # TODO: cxx variant is disabled due to build issue Cyrus
+ # experienced on BGQ. When on, the static build tries
+    #       to link against shared libs.
+ #
+ # we are not using hdf5's mpi or fortran features.
+ depends_on("hdf5~cxx~mpi~fortran", when="+shared")
+ depends_on("hdf5~shared~cxx~mpi~fortran", when="~shared")
+
+ # we are not using silo's fortran features
+ depends_on("silo~fortran", when="+shared")
+ depends_on("silo~shared~fortran", when="~shared")
+
+ #######################
+ # MPI
+ #######################
+ depends_on("mpi", when="+mpi")
+
+ #######################
+ # Documentation related
+ #######################
+ depends_on("py-sphinx", when="+python+doc")
+ depends_on("doxygen", when="+doc")
+
+ def install(self, spec, prefix):
+ """
+ Build and install Conduit.
+ """
+ with working_dir('spack-build', create=True):
+ host_cfg_fname = self.create_host_config(spec, prefix)
+ cmake_args = []
+ # if we have a static build, we need to avoid any of
+ # spack's default cmake settings related to rpaths
+ # (see: https://github.com/LLNL/spack/issues/2658)
+ if "+shared" in spec:
+ cmake_args.extend(std_cmake_args)
+ else:
+ for arg in std_cmake_args:
+ if arg.count("RPATH") == 0:
+ cmake_args.append(arg)
+ cmake_args.extend(["-C", host_cfg_fname, "../src"])
+ cmake(*cmake_args)
+ make()
+ make("install")
+
+ def create_host_config(self, spec, prefix):
+ """
+ This method creates a 'host-config' file that specifies
+ all of the options used to configure and build conduit.
+
+        For more details about 'host-config' files see:
+ http://software.llnl.gov/conduit/building.html
+ """
+
+ #######################
+ # Compiler Info
+ #######################
+ c_compiler = env["SPACK_CC"]
+ cpp_compiler = env["SPACK_CXX"]
+ f_compiler = None
+
+ if self.compiler.fc:
+ # even if this is set, it may not exist so do one more sanity check
+ if os.path.isfile(env["SPACK_FC"]):
+ f_compiler = env["SPACK_FC"]
+
+ #######################################################################
+ # By directly fetching the names of the actual compilers we appear
+        # to be doing something evil here, but this is necessary to create a
+ # 'host config' file that works outside of the spack install env.
+ #######################################################################
+
+ sys_type = spec.architecture
+ # if on llnl systems, we can use the SYS_TYPE
+ if "SYS_TYPE" in env:
+ sys_type = env["SYS_TYPE"]
+
+ ##############################################
+ # Find and record what CMake is used
+ ##############################################
+
+ if "+cmake" in spec:
+ cmake_exe = join_path(spec['cmake'].prefix.bin, "cmake")
+ else:
+ cmake_exe = which("cmake")
+ if cmake_exe is None:
+ msg = 'failed to find CMake (and cmake variant is off)'
+ raise RuntimeError(msg)
+ cmake_exe = cmake_exe.command
+
+ host_cfg_fname = "%s-%s-%s.cmake" % (socket.gethostname(),
+ sys_type,
+ spec.compiler)
+
+ cfg = open(host_cfg_fname, "w")
+ cfg.write("##################################\n")
+ cfg.write("# spack generated host-config\n")
+ cfg.write("##################################\n")
+ cfg.write("# {0}-{1}\n".format(sys_type, spec.compiler))
+ cfg.write("##################################\n\n")
+
+ # Include path to cmake for reference
+ cfg.write("# cmake from spack \n")
+ cfg.write("# cmake executable path: %s\n\n" % cmake_exe)
+
+ #######################
+ # Compiler Settings
+ #######################
+
+ cfg.write("#######\n")
+ cfg.write("# using %s compiler spec\n" % spec.compiler)
+ cfg.write("#######\n\n")
+ cfg.write("# c compiler used by spack\n")
+ cfg.write(cmake_cache_entry("CMAKE_C_COMPILER", c_compiler))
+ cfg.write("# cpp compiler used by spack\n")
+ cfg.write(cmake_cache_entry("CMAKE_CXX_COMPILER", cpp_compiler))
+
+ cfg.write("# fortran compiler used by spack\n")
+ if f_compiler is not None:
+ cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "ON"))
+ cfg.write(cmake_cache_entry("CMAKE_Fortran_COMPILER", f_compiler))
+ else:
+ cfg.write("# no fortran compiler found\n\n")
+ cfg.write(cmake_cache_entry("ENABLE_FORTRAN", "OFF"))
+
+ #######################
+ # Python
+ #######################
+
+ cfg.write("# Python Support\n")
+
+ if "+python" in spec:
+ python_exe = join_path(spec['python'].prefix.bin, "python")
+ cfg.write("# Enable python module builds\n")
+ cfg.write(cmake_cache_entry("ENABLE_PYTHON", "ON"))
+ cfg.write("# python from spack \n")
+ cfg.write(cmake_cache_entry("PYTHON_EXECUTABLE", python_exe))
+ # install module to standard style site packages dir
+ # so we can support spack activate
+ py_ver_short = "python{0}".format(spec["python"].version.up_to(2))
+ pym_prefix = join_path("${CMAKE_INSTALL_PREFIX}",
+ "lib",
+ py_ver_short,
+ "site-packages")
+ # use pym_prefix as the install path
+ cfg.write(cmake_cache_entry("PYTHON_MODULE_INSTALL_PREFIX",
+ pym_prefix))
+ else:
+ cfg.write(cmake_cache_entry("ENABLE_PYTHON", "OFF"))
+
+ if "+doc" in spec:
+ cfg.write(cmake_cache_entry("ENABLE_DOCS", "ON"))
+
+ cfg.write("# sphinx from spack \n")
+ sphinx_build_exe = join_path(spec['py-sphinx'].prefix.bin,
+ "sphinx-build")
+ cfg.write(cmake_cache_entry("SPHINX_EXECUTABLE", sphinx_build_exe))
+
+ cfg.write("# doxygen from uberenv\n")
+ doxygen_exe = join_path(spec['doxygen'].prefix.bin, "doxygen")
+ cfg.write(cmake_cache_entry("DOXYGEN_EXECUTABLE", doxygen_exe))
+ else:
+ cfg.write(cmake_cache_entry("ENABLE_DOCS", "OFF"))
+
+ #######################
+ # MPI
+ #######################
+
+ cfg.write("# MPI Support\n")
+
+ if "+mpi" in spec:
+ cfg.write(cmake_cache_entry("ENABLE_MPI", "ON"))
+ cfg.write(cmake_cache_entry("MPI_C_COMPILER", spec['mpi'].mpicc))
+ # we use `mpicc` as `MPI_CXX_COMPILER` b/c we don't want to
+ # introduce linking deps to the MPI C++ libs (we aren't using
+ # C++ features of MPI) -- this happens with some versions of
+ # OpenMPI
+ cfg.write(cmake_cache_entry("MPI_CXX_COMPILER", spec['mpi'].mpicc))
+ cfg.write(cmake_cache_entry("MPI_Fortran_COMPILER",
+ spec['mpi'].mpifc))
+ else:
+ cfg.write(cmake_cache_entry("ENABLE_MPI", "OFF"))
+
+ #######################################################################
+ # I/O Packages
+ #######################################################################
+
+ cfg.write("# I/O Packages\n\n")
+
+ #######################
+ # HDF5
+ #######################
+
+ cfg.write("# hdf5 from spack \n")
+
+ if "+hdf5" in spec:
+ cfg.write(cmake_cache_entry("HDF5_DIR", spec['hdf5'].prefix))
+ else:
+ cfg.write("# hdf5 not built by spack \n")
+
+ #######################
+ # Silo
+ #######################
+
+ cfg.write("# silo from spack \n")
+
+ if "+silo" in spec:
+ cfg.write(cmake_cache_entry("SILO_DIR", spec['silo'].prefix))
+ else:
+ cfg.write("# silo not built by spack \n")
+
+ cfg.write("##################################\n")
+ cfg.write("# end spack generated host-config\n")
+ cfg.write("##################################\n")
+ cfg.close()
+
+ host_cfg_fname = os.path.abspath(host_cfg_fname)
+ tty.info("spack generated conduit host-config file: " + host_cfg_fname)
+ return host_cfg_fname
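
Since the install hinges on the generated host-config file, a standalone sketch of the cache entries the cmake_cache_entry helper above emits (the compiler path is illustrative):

# Sketch only: same format string as cmake_cache_entry above.
def cmake_cache_entry(name, value):
    return 'set("{0}" "{1}" CACHE PATH "")\n\n'.format(name, value)

print(cmake_cache_entry("CMAKE_C_COMPILER", "/illustrative/bin/cc"))
# -> set("CMAKE_C_COMPILER" "/illustrative/bin/cc" CACHE PATH "")
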
diff --git a/var/spack/repos/builtin/packages/constype/package.py b/var/spack/repos/builtin/packages/constype/package.py
new file mode 100644
index 0000000000..dcf88fdd55
--- /dev/null
+++ b/var/spack/repos/builtin/packages/constype/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Constype(Package):
+ """constype prints on the standard output the Sun code for the type of
+ display that the specified device is.
+
+ It was originally written for SunOS, but has been ported to other
+ SPARC OS'es and to Solaris on both SPARC & x86."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/constype"
+ url = "https://www.x.org/archive/individual/app/constype-1.0.4.tar.gz"
+
+ version('1.0.4', '2333b9ac9fd32e58b05afa651c4590a3')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/converge/package.py b/var/spack/repos/builtin/packages/converge/package.py
new file mode 100644
index 0000000000..429be8542d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/converge/package.py
@@ -0,0 +1,69 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from distutils.dir_util import copy_tree
+import os
+
+
+class Converge(Package):
+ """CONVERGE is a revolutionary computational fluid dynamics (CFD) program
+ that eliminates the grid generation bottleneck from the simulation process.
+ CONVERGE was developed by engine simulation experts and is straightforward
+ to use for both engine and non-engine simulations. Unlike many CFD
+ programs, CONVERGE automatically generates a perfectly orthogonal,
+ structured grid at runtime based on simple, user-defined grid control
+ parameters. This grid generation method completely eliminates the need to
+ manually generate a grid. In addition, CONVERGE offers many other features
+ to expedite the setup process and to ensure that your simulations are as
+ computationally efficient as possible.
+
+ Note: CONVERGE is licensed software. You will need to create an account on
+ the CONVERGE homepage and download CONVERGE yourself. Spack will search
+ your current directory for the download file. Alternatively, add this file
+ to a mirror so that Spack can find it. For instructions on how to set up a
+ mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
+
+ homepage = "https://www.convergecfd.com/"
+ url = "file://%s/converge_install_2.3.16.tar.gz" % os.getcwd()
+
+ version('2.3.16', '8b80f1e73a63181c427c7732ad279986')
+
+ variant('mpi', default=True, description='Build with MPI support')
+
+ # The Converge Getting Started Guide recommends:
+ # MPICH: 3.1.4
+ # HP-MPI: 2.0.3+
+ # OpenMPI: 1.6.*
+ depends_on('mpi', when='+mpi')
+
+ # Licensing
+ license_required = True
+ license_comment = '#'
+ license_files = ['license/license.lic']
+ license_vars = ['RLM_LICENSE']
+ license_url = 'http://www.reprisesoftware.com/RLM_License_Administration.pdf'
+
+ def install(self, spec, prefix):
+ copy_tree('.', prefix)
diff --git a/var/spack/repos/builtin/packages/coreutils/package.py b/var/spack/repos/builtin/packages/coreutils/package.py
index cb8f596b41..94cfa11341 100644
--- a/var/spack/repos/builtin/packages/coreutils/package.py
+++ b/var/spack/repos/builtin/packages/coreutils/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Coreutils(Package):
"""The GNU Core Utilities are the basic file, shell and text
manipulation utilities of the GNU operating system. These are
diff --git a/var/spack/repos/builtin/packages/cp2k/package.py b/var/spack/repos/builtin/packages/cp2k/package.py
new file mode 100644
index 0000000000..3a175ea1a2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cp2k/package.py
@@ -0,0 +1,208 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+import shutil
+import copy
+
+from spack import *
+
+
+class Cp2k(Package):
+ """CP2K is a quantum chemistry and solid state physics software package
+ that can perform atomistic simulations of solid state, liquid, molecular,
+ periodic, material, crystal, and biological systems
+ """
+ homepage = 'https://www.cp2k.org'
+ url = 'https://sourceforge.net/projects/cp2k/files/cp2k-3.0.tar.bz2'
+
+ version('3.0', 'c05bc47335f68597a310b1ed75601d35')
+
+ variant('mpi', default=True, description='Enable MPI support')
+ variant('plumed', default=False, description='Enable PLUMED support')
+
+ depends_on('python', type='build')
+
+ depends_on('lapack')
+ depends_on('blas')
+ depends_on('fftw')
+ depends_on('libint@:1.2', when='@3.0')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('scalapack', when='+mpi')
+ depends_on('plumed+shared+mpi', when='+plumed+mpi')
+ depends_on('plumed+shared~mpi', when='+plumed~mpi')
+ depends_on('pexsi', when='+mpi')
+ depends_on('wannier90', when='+mpi')
+ depends_on('elpa', when='+mpi')
+
+ # TODO : add dependency on libsmm, libxsmm
+ # TODO : add dependency on CUDA
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ # Construct a proper filename for the architecture file
+ cp2k_architecture = '{0.architecture}-{0.compiler.name}'.format(spec)
+ cp2k_version = 'sopt' if '~mpi' in spec else 'popt'
+ makefile_basename = '.'.join([cp2k_architecture, cp2k_version])
+ makefile = join_path('arch', makefile_basename)
+
+ # Write the custom makefile
+ with open(makefile, 'w') as mkf:
+ # Optimization flags
+ optflags = {
+ 'gcc': ['-O2',
+ '-ffast-math',
+ '-ffree-form',
+ '-ffree-line-length-none',
+ '-ftree-vectorize',
+ '-funroll-loops',
+ '-mtune=native'],
+ 'intel': ['-O2',
+ '-pc64',
+ '-unroll',
+ '-heap-arrays 64']
+ }
+ cppflags = [
+ '-D__FFTW3',
+ '-D__LIBINT',
+ '-I' + spec['fftw'].prefix.include
+ ]
+ fcflags = copy.deepcopy(optflags[self.spec.compiler.name])
+ fcflags.extend([
+ '-I' + spec['fftw'].prefix.include
+ ])
+ fftw = find_libraries(['libfftw3'], root=spec['fftw'].prefix.lib)
+ ldflags = [fftw.search_flags]
+ libs = [
+ join_path(spec['libint'].prefix.lib, 'libint.so'),
+ join_path(spec['libint'].prefix.lib, 'libderiv.so'),
+ join_path(spec['libint'].prefix.lib, 'libr12.so')
+ ]
+ if '+plumed' in self.spec:
+ # Include Plumed.inc in the Makefile
+ mkf.write('include {0}\n'.format(
+ join_path(self.spec['plumed'].prefix.lib,
+ 'plumed',
+ 'src',
+ 'lib',
+ 'Plumed.inc')
+ ))
+ # Add required macro
+ cppflags.extend(['-D__PLUMED2'])
+ libs.extend([
+ join_path(self.spec['plumed'].prefix.lib,
+ 'libplumed.{0}'.format(dso_suffix))
+ ])
+
+ mkf.write('CC = {0.compiler.cc}\n'.format(self))
+ if '%intel' in self.spec:
+ # CPP is a commented-out command in CP2K's Intel arch files.
+ # This is the workaround the CP2K developers use to avoid doing:
+ #
+ # ${CPP} <file>.F > <file>.f90
+ #
+ # and use `-fpp` instead
+ mkf.write('CPP = # {0.compiler.cc} -P\n'.format(self))
+ mkf.write('AR = xiar -r\n')
+ else:
+ mkf.write('CPP = {0.compiler.cc} -E\n'.format(self))
+ mkf.write('AR = ar -r\n')
+ fc = self.compiler.fc if '~mpi' in spec else self.spec['mpi'].mpifc
+ mkf.write('FC = {0}\n'.format(fc))
+ mkf.write('LD = {0}\n'.format(fc))
+ # Intel
+ if '%intel' in self.spec:
+ cppflags.extend([
+ '-D__INTEL_COMPILER',
+ '-D__MKL'
+ ])
+ fcflags.extend([
+ '-diag-disable 8290,8291,10010,10212,11060',
+ '-free',
+ '-fpp'
+ ])
+ # MPI
+ if '+mpi' in self.spec:
+ cppflags.extend([
+ '-D__parallel',
+ '-D__LIBPEXSI',
+ '-D__WANNIER90',
+ '-D__ELPA3',
+ '-D__SCALAPACK'
+ ])
+ fcflags.extend([
+ '-I' + join_path(
+ spec['elpa'].prefix,
+ 'include',
+ 'elpa-{0}'.format(str(spec['elpa'].version)),
+ 'modules'
+ ),
+ '-I' + join_path(spec['pexsi'].prefix, 'fortran')
+ ])
+ scalapack = spec['scalapack'].scalapack_libs
+ ldflags.append(scalapack.search_flags)
+ libs.extend([
+ join_path(spec['elpa'].prefix.lib,
+ 'libelpa.{0}'.format(dso_suffix)),
+ join_path(spec['wannier90'].prefix.lib, 'libwannier.a'),
+ join_path(spec['pexsi'].prefix.lib, 'libpexsi.a'),
+ join_path(spec['superlu-dist'].prefix.lib,
+ 'libsuperlu_dist.a'),
+ join_path(
+ spec['parmetis'].prefix.lib,
+ 'libparmetis.{0}'.format(dso_suffix)
+ ),
+ join_path(
+ spec['metis'].prefix.lib,
+ 'libmetis.{0}'.format(dso_suffix)
+ ),
+ ])
+ libs.extend(scalapack)
+ libs.extend(self.spec['mpi'].mpicxx_shared_libs)
+ libs.extend(self.compiler.stdcxx_libs)
+ # LAPACK / BLAS
+ lapack = spec['lapack'].lapack_libs
+ blas = spec['blas'].blas_libs
+
+ ldflags.append((lapack + blas).search_flags)
+ libs.extend([str(x) for x in (fftw, lapack, blas)])
+
+ # Write compiler flags to file
+ mkf.write('CPPFLAGS = {0}\n'.format(' '.join(cppflags)))
+ mkf.write('FCFLAGS = {0}\n'.format(' '.join(fcflags)))
+ mkf.write('LDFLAGS = {0}\n'.format(' '.join(ldflags)))
+ mkf.write('LIBS = {0}\n'.format(' '.join(libs)))
+
+ with working_dir('makefiles'):
+ # Apparently the Makefile bases its paths on PWD
+ # so we need to set PWD = os.getcwd()
+ pwd_backup = env['PWD']
+ env['PWD'] = os.getcwd()
+ make('ARCH={0}'.format(cp2k_architecture),
+ 'VERSION={0}'.format(cp2k_version))
+ env['PWD'] = pwd_backup
+ exe_dir = join_path('exe', cp2k_architecture)
+ shutil.copytree(exe_dir, self.prefix.bin)
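(A minimal sketch of the arch-file naming that the CP2K install() above relies on; plain Python with example values standing in for spec.architecture, spec.compiler.name, and the '+mpi' test.)

    import os

    architecture = "linux-x86_64"   # example for spec.architecture
    compiler_name = "gcc"           # example for spec.compiler.name
    with_mpi = True                 # example for '+mpi' in spec

    # CP2K selects its makefile as arch/<ARCH>.<VERSION>, where VERSION is
    # 'popt' (parallel) or 'sopt' (serial); install() writes that file and
    # then runs `make ARCH=<ARCH> VERSION=<VERSION>` inside makefiles/.
    cp2k_architecture = "{0}-{1}".format(architecture, compiler_name)
    cp2k_version = "popt" if with_mpi else "sopt"
    makefile = os.path.join("arch", ".".join([cp2k_architecture, cp2k_version]))
    print(makefile)  # arch/linux-x86_64-gcc.popt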
diff --git a/var/spack/repos/builtin/packages/cppad/package.py b/var/spack/repos/builtin/packages/cppad/package.py
new file mode 100644
index 0000000000..1ec31bbeef
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cppad/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Cppad(CMakePackage):
+ """A Package for Differentiation of C++ Algorithms."""
+
+ homepage = "https://www.coin-or.org/CppAD/"
+
+ version('20170114', '565a534dc813fa1289764222cd8c11ea')
+ version('develop', git='https://github.com/coin-or/CppAD.git')
+
+ depends_on('cmake', type='build')
+
+ def url_for_version(self, version):
+ """Handle version-based custom URLs."""
+ return "http://www.coin-or.org/download/source/CppAD/cppad-%s.gpl.tgz" % (version)
+
+ def cmake_args(self):
+ # This package does not obey CMAKE_INSTALL_PREFIX
+ args = [
+ "-Dcppad_prefix=%s" % (self.prefix),
+ "-Dcmake_install_docdir=share/cppad/doc"
+ ]
+ return args
diff --git a/var/spack/repos/builtin/packages/cppcheck/package.py b/var/spack/repos/builtin/packages/cppcheck/package.py
index 16f052ef2f..fd48fcb7e3 100644
--- a/var/spack/repos/builtin/packages/cppcheck/package.py
+++ b/var/spack/repos/builtin/packages/cppcheck/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Cppcheck(Package):
"""A tool for static C/C++ code analysis."""
homepage = "http://cppcheck.sourceforge.net/"
diff --git a/var/spack/repos/builtin/packages/cppunit/package.py b/var/spack/repos/builtin/packages/cppunit/package.py
new file mode 100644
index 0000000000..78956798b5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/cppunit/package.py
@@ -0,0 +1,34 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Cppunit(AutotoolsPackage):
+ """Obsolete Unit testing framework for C++"""
+
+ homepage = "https://wiki.freedesktop.org/www/Software/cppunit/"
+ url = "http://dev-www.libreoffice.org/src/cppunit-1.13.2.tar.gz"
+
+ version('1.13.2', '0eaf8bb1dcf4d16b12bec30d0732370390d35e6f')
diff --git a/var/spack/repos/builtin/packages/cram/package.py b/var/spack/repos/builtin/packages/cram/package.py
index 7e05587087..bef26cdcbd 100644
--- a/var/spack/repos/builtin/packages/cram/package.py
+++ b/var/spack/repos/builtin/packages/cram/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Cram(Package):
"""Cram runs many small MPI jobs inside one large MPI job."""
homepage = "https://github.com/llnl/cram"
@@ -33,6 +34,7 @@ class Cram(Package):
extends('python')
depends_on("mpi")
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/cryptopp/package.py b/var/spack/repos/builtin/packages/cryptopp/package.py
index 77895a7331..c92f262a9a 100644
--- a/var/spack/repos/builtin/packages/cryptopp/package.py
+++ b/var/spack/repos/builtin/packages/cryptopp/package.py
@@ -25,20 +25,26 @@
import glob
from spack import *
+
class Cryptopp(Package):
"""Crypto++ is an open-source C++ library of cryptographic schemes. The
- library supports a number of different cryptography algorithms, including
- authenticated encryption schemes (GCM, CCM), hash functions (SHA-1, SHA2),
- public-key encryption (RSA, DSA), and a few obsolete/historical encryption
- algorithms (MD5, Panama)."""
+ library supports a number of different cryptography algorithms,
+ including authenticated encryption schemes (GCM, CCM), hash
+ functions (SHA-1, SHA2), public-key encryption (RSA, DSA), and a
+ few obsolete/historical encryption algorithms (MD5, Panama).
+
+ """
homepage = "http://www.cryptopp.com"
- base_url = "http://www.cryptopp.com"
version('5.6.3', '3c5b70e2ec98b7a24988734446242d07')
version('5.6.2', '7ed022585698df48e65ce9218f6c6a67')
version('5.6.1', '96cbeba0907562b077e26bcffb483828')
+ def url_for_version(self, version):
+ url = "{0}/{1}{2}.zip"
+ return url.format(self.homepage, self.name, version.joined)
+
def install(self, spec, prefix):
make()
@@ -48,7 +54,3 @@ class Cryptopp(Package):
mkdirp(prefix.lib)
install('libcryptopp.a', prefix.lib)
-
- def url_for_version(self, version):
- version_string = str(version).replace('.', '')
- return '%s/cryptopp%s.zip' % (Cryptopp.base_url, version_string)
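(A minimal sketch of the joined-version URL scheme used by the new url_for_version above; plain Python that emulates Spack's Version.joined with a string replace, and the version passed in is just an example.)

    homepage = "http://www.cryptopp.com"
    name = "cryptopp"

    def joined(version):
        # Stand-in for Spack's Version.joined, which drops the dots.
        return version.replace(".", "")

    def url_for_version(version):
        return "{0}/{1}{2}.zip".format(homepage, name, joined(version))

    print(url_for_version("5.6.3"))  # -> http://www.cryptopp.com/cryptopp563.zip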
diff --git a/var/spack/repos/builtin/packages/cscope/package.py b/var/spack/repos/builtin/packages/cscope/package.py
index 88d522f486..73017ffc57 100644
--- a/var/spack/repos/builtin/packages/cscope/package.py
+++ b/var/spack/repos/builtin/packages/cscope/package.py
@@ -24,18 +24,14 @@
##############################################################################
from spack import *
-class Cscope(Package):
+
+class Cscope(AutotoolsPackage):
"""Cscope is a developer's tool for browsing source code."""
- homepage = "http://http://cscope.sourceforge.net/"
+
+ homepage = "http://cscope.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/cscope/cscope/15.8b/cscope-15.8b.tar.gz"
version('15.8b', '8f9409a238ee313a96f9f87fe0f3b176')
# Can be configured to use flex (not necessary)
# ./configure --with-flex
-
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
-
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/cube/package.py b/var/spack/repos/builtin/packages/cube/package.py
index 40c2cc6893..9cea3c40e8 100644
--- a/var/spack/repos/builtin/packages/cube/package.py
+++ b/var/spack/repos/builtin/packages/cube/package.py
@@ -22,14 +22,12 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
class Cube(Package):
- """
- Cube the profile viewer for Score-P and Scalasca profiles. It displays a multi-dimensional performance space
- consisting of the dimensions:
+ """Cube the profile viewer for Score-P and Scalasca profiles. It displays a
+ multi-dimensional performance space consisting of the dimensions:
- performance metric
- call path
- system resource
@@ -38,18 +36,26 @@ class Cube(Package):
homepage = "http://www.scalasca.org/software/cube-4.x/download.html"
url = "http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz"
+ version('4.3.4', '50f73060f55311cb12c5b3cb354d59fa',
+ url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.4.tar.gz')
version('4.3.3', '07e109248ed8ffc7bdcce614264a2909',
url='http://apps.fz-juelich.de/scalasca/releases/cube/4.3/dist/cube-4.3.3.tar.gz')
-
version('4.2.3', '8f95b9531f5a8f8134f279c2767c9b20',
url="http://apps.fz-juelich.de/scalasca/releases/cube/4.2/dist/cube-4.2.3.tar.gz")
- # TODO : add variant that builds GUI on top of Qt
+ variant('gui', default=False, description='Build CUBE GUI')
+
+ depends_on('zlib')
+ depends_on('qt@4.6:', when='+gui')
def install(self, spec, prefix):
configure_args = ["--prefix=%s" % prefix,
- "--without-paraver",
- "--without-gui"]
+ "--without-paraver"]
+
+ # TODO : need to handle cross compiling build
+ if '+gui' not in spec:
+ configure_args.append('--without-gui')
+
configure(*configure_args)
- make(parallel=False)
+ make()
make("install", parallel=False)
diff --git a/var/spack/repos/builtin/packages/cuda/package.py b/var/spack/repos/builtin/packages/cuda/package.py
index 96694670ad..ed8518f98f 100644
--- a/var/spack/repos/builtin/packages/cuda/package.py
+++ b/var/spack/repos/builtin/packages/cuda/package.py
@@ -26,39 +26,46 @@ from spack import *
from glob import glob
import os
+
class Cuda(Package):
- """CUDA is a parallel computing platform and programming model invented by
- NVIDIA. It enables dramatic increases in computing performance by harnessing
- the power of the graphics processing unit (GPU).
+ """CUDA is a parallel computing platform and programming model invented
+ by NVIDIA. It enables dramatic increases in computing performance by
+ harnessing the power of the graphics processing unit (GPU).
+
+ Note: NVIDIA does not provide a download URL for CUDA so you will
+ need to download it yourself. Go to
+ https://developer.nvidia.com/cuda-downloads and select your Operating
+ System, Architecture, Distribution, and Version. For the Installer
+ Type, select runfile and click Download. Spack will search your
+ current directory for this file. Alternatively, add this file to a
+ mirror so that Spack can find it. For instructions on how to set up a
+ mirror, see http://spack.readthedocs.io/en/latest/mirrors.html.
- Note: NVIDIA does not provide a download URL for CUDA so you will need to
- download it yourself. Go to https://developer.nvidia.com/cuda-downloads
- and select your Operating System, Architecture, Distribution, and Version.
- For the Installer Type, select runfile and click Download. Spack will search
- your current directory for this file. Alternatively, add this file to a
- mirror so that Spack can find it. For instructions on how to set up a mirror,
- see http://software.llnl.gov/spack/mirrors.html
+ Note: This package does not currently install the drivers necessary
+ to run CUDA. These will need to be installed manually. See:
+ http://docs.nvidia.com/cuda/cuda-getting-started-guide-for-linux for
+ details.
- Note: This package does not currently install the drivers necessary to run
- CUDA. These will need to be installed manually. See:
- http://docs.nvidia.com/cuda/cuda-getting-started-guide-for-linux for details."""
+ """
homepage = "http://www.nvidia.com/object/cuda_home_new.html"
+ version('8.0.44', '6dca912f9b7e2b7569b0074a41713640', expand=False,
+ url="file://%s/cuda_8.0.44_linux.run" % os.getcwd())
version('7.5.18', '4b3bcecf0dfc35928a0898793cf3e4c6', expand=False,
url="file://%s/cuda_7.5.18_linux.run" % os.getcwd())
version('6.5.14', '90b1b8f77313600cc294d9271741f4da', expand=False,
url="file://%s/cuda_6.5.14_linux_64.run" % os.getcwd())
-
def install(self, spec, prefix):
runfile = glob(os.path.join(self.stage.path, 'cuda*.run'))[0]
chmod = which('chmod')
chmod('+x', runfile)
runfile = which(runfile)
- # Note: NVIDIA does not officially support many newer versions of compilers.
- # For example, on CentOS 6, you must use GCC 4.4.7 or older. See:
+ # Note: NVIDIA does not officially support many newer versions of
+ # compilers. For example, on CentOS 6, you must use GCC 4.4.7 or
+ # older. See:
# http://docs.nvidia.com/cuda/cuda-installation-guide-linux/#system-requirements
# for details.
@@ -68,4 +75,3 @@ class Cuda(Package):
'--toolkit', # install CUDA Toolkit
'--toolkitpath=%s' % prefix
)
-
diff --git a/var/spack/repos/builtin/packages/curl/package.py b/var/spack/repos/builtin/packages/curl/package.py
index e3de6ee5a1..a22ac52714 100644
--- a/var/spack/repos/builtin/packages/curl/package.py
+++ b/var/spack/repos/builtin/packages/curl/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Curl(Package):
"""cURL is an open source command line tool and library for
transferring data with URL syntax"""
@@ -31,6 +32,10 @@ class Curl(Package):
homepage = "http://curl.haxx.se"
url = "http://curl.haxx.se/download/curl-7.46.0.tar.bz2"
+ version('7.50.3', 'bd177fd6deecce00cfa7b5916d831c5e')
+ version('7.50.2', '6e161179f7af4b9f8b6ea21420132719')
+ version('7.50.1', '015f6a0217ca6f2c5442ca406476920b')
+ version('7.49.1', '6bb1f7af5b58b30e4e6414b8c1abccab')
version('7.47.1', '9ea3123449439bbd960cd25cf98796fb')
version('7.46.0', '9979f989a2a9930d10f1b3deeabc2148')
version('7.45.0', '62c1a352b28558f25ba6209214beadc8')
diff --git a/var/spack/repos/builtin/packages/czmq/package.py b/var/spack/repos/builtin/packages/czmq/package.py
index d69f3e5009..ef6374619b 100644
--- a/var/spack/repos/builtin/packages/czmq/package.py
+++ b/var/spack/repos/builtin/packages/czmq/package.py
@@ -25,33 +25,37 @@
from spack import *
import os
+
class Czmq(Package):
""" A C interface to the ZMQ library """
homepage = "http://czmq.zeromq.org"
url = "https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz"
- version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1', url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz')
+ version('3.0.2', '23e9885f7ee3ce88d99d0425f52e9be1',
+ url='https://github.com/zeromq/czmq/archive/v3.0.2.tar.gz')
- depends_on('libtool')
- depends_on('automake')
- depends_on('autoconf')
- depends_on('pkg-config')
+ depends_on('libtool', type='build')
+ depends_on('automake', type='build')
+ depends_on('autoconf', type='build')
+ depends_on('pkg-config', type='build')
depends_on('zeromq')
def install(self, spec, prefix):
- bash = which("bash")
# Work around autogen.sh oddities
+ # bash = which("bash")
# bash("./autogen.sh")
mkdirp("config")
autoreconf = which("autoreconf")
autoreconf("--install", "--verbose", "--force",
- "-I", "config",
- "-I", os.path.join(spec['pkg-config'].prefix, "share", "aclocal"),
- "-I", os.path.join(spec['automake'].prefix, "share", "aclocal"),
- "-I", os.path.join(spec['libtool'].prefix, "share", "aclocal"),
- )
+ "-I", "config",
+ "-I", os.path.join(spec['pkg-config'].prefix,
+ "share", "aclocal"),
+ "-I", os.path.join(spec['automake'].prefix,
+ "share", "aclocal"),
+ "-I", os.path.join(spec['libtool'].prefix,
+ "share", "aclocal"),
+ )
configure("--prefix=%s" % prefix)
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/daal/package.py b/var/spack/repos/builtin/packages/daal/package.py
new file mode 100644
index 0000000000..a39064df9e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/daal/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+from spack.pkg.builtin.intel import IntelInstaller
+
+
+class Daal(IntelInstaller):
+ """Intel Data Analytics Acceleration Library.
+
+ Note: You will have to add the download file to a
+ mirror so that Spack can find it. For instructions on how to set up a
+ mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
+
+ homepage = "https://software.intel.com/en-us/daal"
+
+ version('2016.2.181', 'aad2aa70e5599ebfe6f85b29d8719d46',
+ url="file://%s/l_daal_2016.2.181.tgz" % os.getcwd())
+ version('2016.3.210', 'ad747c0dd97dace4cad03cf2266cad28',
+ url="file://%s/l_daal_2016.3.210.tgz" % os.getcwd())
+
+ def install(self, spec, prefix):
+
+ self.intel_prefix = os.path.join(prefix, "pkg")
+ IntelInstaller.install(self, spec, prefix)
+
+ daal_dir = os.path.join(self.intel_prefix, "daal")
+ for f in os.listdir(daal_dir):
+ os.symlink(os.path.join(daal_dir, f), os.path.join(self.prefix, f))
diff --git a/var/spack/repos/builtin/packages/dakota/package.py b/var/spack/repos/builtin/packages/dakota/package.py
index 3a8ddf28ef..e8f7d0889b 100644
--- a/var/spack/repos/builtin/packages/dakota/package.py
+++ b/var/spack/repos/builtin/packages/dakota/package.py
@@ -26,17 +26,22 @@ from spack import *
class Dakota(Package):
- """
- The Dakota toolkit provides a flexible, extensible interface between analysis codes and iterative systems
- analysis methods. Dakota contains algorithms for:
+ """The Dakota toolkit provides a flexible, extensible interface between
+ analysis codes and iterative systems analysis methods. Dakota
+ contains algorithms for:
- optimization with gradient and non gradient-based methods;
- - uncertainty quantification with sampling, reliability, stochastic expansion, and epistemic methods;
+ - uncertainty quantification with sampling, reliability, stochastic
+ expansion, and epistemic methods;
- parameter estimation with nonlinear least squares methods;
- - sensitivity/variance analysis with design of experiments and parameter study methods.
+ - sensitivity/variance analysis with design of experiments and
+ parameter study methods.
+
+ These capabilities may be used on their own or as components within
+ advanced strategies such as hybrid optimization, surrogate-based
+ optimization, mixed integer nonlinear programming, or optimization
+ under uncertainty.
- These capabilities may be used on their own or as components within advanced strategies such as hybrid optimization,
- surrogate-based optimization, mixed integer nonlinear programming, or optimization under uncertainty.
"""
homepage = 'https://dakota.sandia.gov/'
@@ -45,8 +50,10 @@ class Dakota(Package):
version('6.3', '05a58d209fae604af234c894c3f73f6d')
- variant('debug', default=False, description='Builds a debug version of the libraries')
- variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
variant('mpi', default=True, description='Activates MPI support')
depends_on('blas')
@@ -55,6 +62,7 @@ class Dakota(Package):
depends_on('python')
depends_on('boost')
+ depends_on('cmake', type='build')
def url_for_version(self, version):
return Dakota._url_str.format(version=version)
@@ -63,12 +71,17 @@ class Dakota(Package):
options = []
options.extend(std_cmake_args)
- options.extend(['-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'),
- '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF')])
+ options.extend([
+ '-DCMAKE_BUILD_TYPE:STRING=%s' % (
+ 'Debug' if '+debug' in spec else 'Release'),
+ '-DBUILD_SHARED_LIBS:BOOL=%s' % (
+ 'ON' if '+shared' in spec else 'OFF')])
if '+mpi' in spec:
- options.extend(['-DDAKOTA_HAVE_MPI:BOOL=ON',
- '-DMPI_CXX_COMPILER:STRING=%s' % join_path(spec['mpi'].prefix.bin, 'mpicxx')])
+ options.extend([
+ '-DDAKOTA_HAVE_MPI:BOOL=ON',
+ '-DMPI_CXX_COMPILER:STRING=%s' % join_path(
+ spec['mpi'].prefix.bin, 'mpicxx')])
build_directory = join_path(self.stage.path, 'spack-build')
source_directory = self.stage.source_path
diff --git a/var/spack/repos/builtin/packages/damageproto/package.py b/var/spack/repos/builtin/packages/damageproto/package.py
new file mode 100644
index 0000000000..84e0fac311
--- /dev/null
+++ b/var/spack/repos/builtin/packages/damageproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Damageproto(Package):
+ """X Damage Extension.
+
+ This package contains header files and documentation for the X Damage
+ extension. Library and server implementations are separate."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/damageproto"
+ url = "https://www.x.org/releases/individual/proto/damageproto-1.2.1.tar.gz"
+
+ version('1.2.1', 'bf8c47b7f48625230cff155180f8ddce')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/damselfly/package.py b/var/spack/repos/builtin/packages/damselfly/package.py
index 42fab63f98..a37728c92b 100644
--- a/var/spack/repos/builtin/packages/damselfly/package.py
+++ b/var/spack/repos/builtin/packages/damselfly/package.py
@@ -24,15 +24,19 @@
##############################################################################
from spack import *
+
class Damselfly(Package):
"""Damselfly is a model-based parallel network simulator."""
homepage = "https://github.com/llnl/damselfly"
url = "https://github.com/llnl/damselfly"
- version('1.0', '05cf7e2d8ece4408c0f2abb7ab63fd74c0d62895', git='https://github.com/llnl/damselfly.git', tag='v1.0')
+ version('1.0', '05cf7e2d8ece4408c0f2abb7ab63fd74c0d62895',
+ git='https://github.com/llnl/damselfly.git', tag='v1.0')
+
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
- cmake('-DCMAKE_BUILD_TYPE=release', '..', *std_cmake_args)
- make()
- make('install')
+ cmake('-DCMAKE_BUILD_TYPE=release', '..', *std_cmake_args)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/darshan-runtime/package.py b/var/spack/repos/builtin/packages/darshan-runtime/package.py
new file mode 100644
index 0000000000..a031374381
--- /dev/null
+++ b/var/spack/repos/builtin/packages/darshan-runtime/package.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class DarshanRuntime(Package):
+ """Darshan (runtime) is a scalable HPC I/O characterization tool
+ designed to capture an accurate picture of application I/O behavior,
+ including properties such as patterns of access within files, with
+ minimum overhead. DarshanRuntime package should be installed on
+ systems where you intend to instrument MPI applications."""
+
+ homepage = "http://www.mcs.anl.gov/research/projects/darshan/"
+ url = "ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-3.1.0.tar.gz"
+
+ version('3.1.0', '439d717323e6265b2612ed127886ae52')
+ version('3.0.0', '732577fe94238936268d74d7d74ebd08')
+
+ depends_on('mpi')
+ depends_on('zlib')
+
+ variant('slurm', default=False, description='Use Slurm Job ID')
+ variant('cobalt', default=False, description='Use Cobalt Job ID')
+ variant('pbs', default=False, description='Use PBS Job ID')
+
+ def install(self, spec, prefix):
+
+ job_id = 'NONE'
+ if '+slurm' in spec:
+ job_id = 'SLURM_JOBID'
+ if '+cobalt' in spec:
+ job_id = 'COBALT_JOBID'
+ if '+pbs' in spec:
+ job_id = 'PBS_JOBID'
+
+ # TODO: BG-Q and other platform configure options
+ options = ['CC=%s' % spec['mpi'].mpicc,
+ '--with-mem-align=8',
+ '--with-log-path-by-env=DARSHAN_LOG_DIR_PATH',
+ '--with-jobid-env=%s' % job_id,
+ '--with-zlib=%s' % spec['zlib'].prefix]
+
+ with working_dir('spack-build', create=True):
+ configure = Executable('../darshan-runtime/configure')
+ configure('--prefix=%s' % prefix, *options)
+ make()
+ make('install')
+
+ def setup_environment(self, spack_env, run_env):
+ # default path for log file, could be user or site specific setting
+ darshan_log_dir = '%s' % os.environ['HOME']
+ run_env.set('DARSHAN_LOG_DIR_PATH', darshan_log_dir)
diff --git a/var/spack/repos/builtin/packages/darshan-util/package.py b/var/spack/repos/builtin/packages/darshan-util/package.py
new file mode 100644
index 0000000000..47b0497ca8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/darshan-util/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class DarshanUtil(Package):
+ """Darshan (util) is collection of tools for parsing and summarizing log
+ files produced by Darshan (runtime) instrumentation. This package is
+ typically installed on systems (front-end) where you intend to analyze
+ log files produced by Darshan (runtime)."""
+
+ homepage = "http://www.mcs.anl.gov/research/projects/darshan/"
+ url = "ftp://ftp.mcs.anl.gov/pub/darshan/releases/darshan-3.1.0.tar.gz"
+
+ version('3.1.0', '439d717323e6265b2612ed127886ae52')
+ version('3.0.0', '732577fe94238936268d74d7d74ebd08')
+
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+
+ options = ['CC=%s' % self.compiler.cc,
+ '--with-zlib=%s' % spec['zlib'].prefix]
+
+ with working_dir('spack-build', create=True):
+ configure = Executable('../darshan-util/configure')
+ configure('--prefix=%s' % prefix, *options)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/datamash/package.py b/var/spack/repos/builtin/packages/datamash/package.py
new file mode 100644
index 0000000000..85adeca996
--- /dev/null
+++ b/var/spack/repos/builtin/packages/datamash/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Datamash(Package):
+ """GNU datamash is a command-line program which performs basic numeric,
+ textual and statistical operations on input textual data files.
+ """
+
+ homepage = "https://www.gnu.org/software/datamash/"
+ url = "http://ftp.gnu.org/gnu/datamash/datamash-1.0.5.tar.gz"
+
+ version('1.1.0', '79a6affca08107a095e97e4237fc8775')
+ version('1.0.7', '9f317bab07454032ba9c068e7f17b04b')
+ version('1.0.6', 'ff26fdef0f343cb695cf1853e14a1a5b')
+ version('1.0.5', '9a29549dc7feca49fdc5fab696614e11')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/dbus/package.py b/var/spack/repos/builtin/packages/dbus/package.py
index 130ba2ea1f..fdca68f53f 100644
--- a/var/spack/repos/builtin/packages/dbus/package.py
+++ b/var/spack/repos/builtin/packages/dbus/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Dbus(Package):
"""D-Bus is a message bus system, a simple way for applications to
talk to one another. D-Bus supplies both a system daemon (for
diff --git a/var/spack/repos/builtin/packages/dealii/package.py b/var/spack/repos/builtin/packages/dealii/package.py
index 49dc971d3a..66267b2d9a 100644
--- a/var/spack/repos/builtin/packages/dealii/package.py
+++ b/var/spack/repos/builtin/packages/dealii/package.py
@@ -23,126 +23,174 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import sys
+import os
-class Dealii(Package):
- """C++ software library providing well-documented tools to build finite element codes for a broad variety of PDEs."""
+
+class Dealii(CMakePackage):
+ """C++ software library providing well-documented tools to build finite
+ element codes for a broad variety of PDEs."""
homepage = "https://www.dealii.org"
- url = "https://github.com/dealii/dealii/releases/download/v8.4.0/dealii-8.4.0.tar.gz"
+ url = "https://github.com/dealii/dealii/releases/download/v8.4.1/dealii-8.4.1.tar.gz"
+
+ # Don't add RPATHs to this package for the full build DAG.
+ # only add for immediate deps.
+ transitive_rpaths = False
+ version('8.4.2', '84c6bd3f250d3e0681b645d24cb987a7')
+ version('8.4.1', 'efbaf16f9ad59cfccad62302f36c3c1d')
version('8.4.0', 'ac5dbf676096ff61e092ce98c80c2b00')
- version('dev', git='https://github.com/dealii/dealii.git')
+ version('8.3.0', 'fc6cdcb16309ef4bea338a4f014de6fa')
+ version('8.2.1', '71c728dbec14f371297cd405776ccf08')
+ version('8.1.0', 'aa8fadc2ce5eb674f44f997461bf668d')
+ version('develop', git='https://github.com/dealii/dealii.git')
variant('mpi', default=True, description='Compile with MPI')
- variant('arpack', default=True, description='Compile with Arpack and PArpack (only with MPI)')
- variant('doc', default=False, description='Compile with documentation')
- variant('gsl' , default=True, description='Compile with GSL')
- variant('hdf5', default=True, description='Compile with HDF5 (only with MPI)')
+ variant('arpack', default=True,
+ description='Compile with Arpack and PArpack (only with MPI)')
+ variant('doc', default=False,
+ description='Compile with documentation')
+ variant('gsl', default=True, description='Compile with GSL')
+ variant('hdf5', default=True,
+ description='Compile with HDF5 (only with MPI)')
variant('metis', default=True, description='Compile with Metis')
- variant('netcdf', default=True, description='Compile with Netcdf (only with MPI)')
+ variant('netcdf', default=True,
+ description='Compile with Netcdf (only with MPI)')
variant('oce', default=True, description='Compile with OCE')
- variant('p4est', default=True, description='Compile with P4est (only with MPI)')
- variant('petsc', default=True, description='Compile with Petsc (only with MPI)')
- variant('slepc', default=True, description='Compile with Slepc (only with Petsc and MPI)')
- variant('trilinos', default=True, description='Compile with Trilinos (only with MPI)')
+ variant('p4est', default=True,
+ description='Compile with P4est (only with MPI)')
+ variant('petsc', default=True,
+ description='Compile with Petsc (only with MPI)')
+ variant('slepc', default=True,
+ description='Compile with Slepc (only with Petsc and MPI)')
+ variant('trilinos', default=True,
+ description='Compile with Trilinos (only with MPI)')
+ variant('python', default=True,
+ description='Compile with Python bindings')
+ variant('int64', default=False,
+ description='Compile with 64 bit indices support')
# required dependencies, light version
- depends_on ("blas")
- # Boost 1.58 is blacklisted, see https://github.com/dealii/dealii/issues/1591
- # require at least 1.59
- depends_on ("boost@1.59.0:", when='~mpi')
- depends_on ("boost@1.59.0:+mpi", when='+mpi')
- depends_on ("bzip2")
- depends_on ("cmake")
- depends_on ("lapack")
- depends_on ("muparser")
- depends_on ("suite-sparse")
- depends_on ("tbb")
- depends_on ("zlib")
+ depends_on("blas")
+ # Boost 1.58 is blacklisted, see
+ # https://github.com/dealii/dealii/issues/1591
+ # Require at least 1.59
+ # +python won't affect @:8.4.2
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams",
+ when='@:8.4.2~mpi')
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi",
+ when='@:8.4.2+mpi')
+ # since @8.5.0: (and @develop) python bindings are introduced:
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams",
+ when='@8.5.0:~mpi~python')
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams+mpi",
+ when='@8.5.0:+mpi~python')
+ depends_on("boost@1.59.0:+thread+system+serialization+iostreams+python",
+ when='@8.5.0:~mpi+python')
+ depends_on(
+ "boost@1.59.0:+thread+system+serialization+iostreams+mpi+python",
+ when='@8.5.0:+mpi+python')
+ depends_on("bzip2")
+ depends_on("lapack")
+ depends_on("muparser")
+ depends_on("suite-sparse")
+ depends_on("tbb")
+ depends_on("zlib")
# optional dependencies
- depends_on ("mpi", when="+mpi")
- depends_on ("arpack-ng+mpi", when='+arpack+mpi')
- depends_on ("doxygen", when='+doc')
- depends_on ("gsl", when='@8.5.0:+gsl')
- depends_on ("gsl", when='@dev+gsl')
- depends_on ("hdf5+mpi~cxx", when='+hdf5+mpi') #FIXME NetCDF declares dependency with ~cxx, why?
- depends_on ("metis@5:", when='+metis')
- depends_on ("netcdf+mpi", when="+netcdf+mpi")
- depends_on ("netcdf-cxx", when='+netcdf+mpi')
- depends_on ("oce", when='+oce')
- depends_on ("p4est", when='+p4est+mpi')
- depends_on ("petsc+mpi", when='+petsc+mpi')
- depends_on ("slepc", when='+slepc+petsc+mpi')
- depends_on ("trilinos", when='+trilinos+mpi')
-
- # developer dependnecies
- depends_on ("numdiff", when='@dev')
- depends_on ("astyle@2.04", when='@dev')
+ depends_on("mpi", when="+mpi")
+ depends_on("arpack-ng+mpi", when='+arpack+mpi')
+ depends_on("doxygen+graphviz", when='+doc')
+ depends_on("graphviz", when='+doc')
+ depends_on("gsl", when='@8.5.0:+gsl')
+ depends_on("hdf5+mpi", when='+hdf5+mpi')
+ depends_on("metis@5:", when='+metis')
+ depends_on("netcdf+mpi", when="+netcdf+mpi")
+ depends_on("netcdf-cxx", when='+netcdf+mpi')
+ depends_on("oce", when='+oce')
+ depends_on("p4est", when='+p4est+mpi')
+ depends_on("petsc+mpi", when='@8.4.2:+petsc+mpi~int64')
+ depends_on('python', when='@8.5.0:+python')
+ depends_on("slepc", when='@8.4.2:+slepc+petsc+mpi~int64')
+ depends_on("petsc@:3.6.4+mpi", when='@:8.4.1+petsc+mpi~int64')
+ depends_on("slepc@:3.6.3", when='@:8.4.1+slepc+petsc+mpi~int64')
+ depends_on("trilinos", when='+trilinos+mpi')
+
+ def build_type(self):
+ # CMAKE_BUILD_TYPE should be DebugRelease | Debug | Release
+ return 'DebugRelease'
- def install(self, spec, prefix):
+ def cmake_args(self):
+ spec = self.spec
options = []
- options.extend(std_cmake_args)
- # CMAKE_BUILD_TYPE should be DebugRelease | Debug | Release
- for word in options[:]:
- if word.startswith('-DCMAKE_BUILD_TYPE'):
- options.remove(word)
-
- dsuf = 'dylib' if sys.platform == 'darwin' else 'so'
+ lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
options.extend([
- '-DCMAKE_BUILD_TYPE=DebugRelease',
'-DDEAL_II_COMPONENT_EXAMPLES=ON',
'-DDEAL_II_WITH_THREADS:BOOL=ON',
'-DBOOST_DIR=%s' % spec['boost'].prefix,
'-DBZIP2_DIR=%s' % spec['bzip2'].prefix,
- # CMake's FindBlas/Lapack may pickup system's blas/lapack instead of Spack's.
- # Be more specific to avoid this.
- # Note that both lapack and blas are provided in -DLAPACK_XYZ variables
+ # CMake's FindBlas/Lapack may pickup system's blas/lapack instead
+ # of Spack's. Be more specific to avoid this.
+ # Note that both lapack and blas are provided in -DLAPACK_XYZ.
'-DLAPACK_FOUND=true',
- '-DLAPACK_INCLUDE_DIRS=%s;%s' %
- (spec['lapack'].prefix.include,
- spec['blas'].prefix.include),
- '-DLAPACK_LIBRARIES=%s;%s' %
- (join_path(spec['lapack'].prefix.lib,'liblapack.%s' % dsuf), # FIXME don't hardcode names
- join_path(spec['blas'].prefix.lib,'libblas.%s' % dsuf)), # FIXME don't hardcode names
- '-DMUPARSER_DIR=%s ' % spec['muparser'].prefix,
+ '-DLAPACK_INCLUDE_DIRS=%s;%s' % (
+ spec['lapack'].prefix.include, spec['blas'].prefix.include),
+ '-DLAPACK_LIBRARIES=%s' % lapack_blas.joined(';'),
+ '-DMUPARSER_DIR=%s' % spec['muparser'].prefix,
'-DUMFPACK_DIR=%s' % spec['suite-sparse'].prefix,
'-DTBB_DIR=%s' % spec['tbb'].prefix,
'-DZLIB_DIR=%s' % spec['zlib'].prefix
])
+ if spec.satisfies('@8.5.0:'):
+ options.extend([
+ '-DDEAL_II_COMPONENT_PYTHON_BINDINGS=%s' %
+ ('ON' if '+python' in spec else 'OFF')
+ ])
+
+ # Set directory structure:
+ if spec.satisfies('@:8.2.1'):
+ options.extend(['-DDEAL_II_COMPONENT_COMPAT_FILES=OFF'])
+ else:
+ options.extend([
+ '-DDEAL_II_EXAMPLES_RELDIR=share/deal.II/examples',
+ '-DDEAL_II_DOCREADME_RELDIR=share/deal.II/',
+ '-DDEAL_II_DOCHTML_RELDIR=share/deal.II/doc'
+ ])
+
# MPI
if '+mpi' in spec:
options.extend([
'-DDEAL_II_WITH_MPI:BOOL=ON',
- '-DCMAKE_C_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpicc'), # FIXME: avoid hardcoding mpi wrappers names
- '-DCMAKE_CXX_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpic++'),
- '-DCMAKE_Fortran_COMPILER=%s' % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
+ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
+ '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
+ '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc,
])
else:
options.extend([
'-DDEAL_II_WITH_MPI:BOOL=OFF',
])
- # Optional dependencies for which librariy names are the same as CMake variables
- for library in ('gsl','hdf5','p4est','petsc','slepc','trilinos','metis'):
+ # Optional dependencies for which library names are the same as CMake
+ # variables:
+ for library in (
+ 'gsl', 'hdf5', 'p4est', 'petsc', 'slepc', 'trilinos', 'metis'):
if library in spec:
options.extend([
- '-D{library}_DIR={value}'.format(library=library.upper(), value=spec[library].prefix),
- '-DDEAL_II_WITH_{library}:BOOL=ON'.format(library=library.upper())
+ '-D%s_DIR=%s' % (library.upper(), spec[library].prefix),
+ '-DDEAL_II_WITH_%s:BOOL=ON' % library.upper()
])
else:
options.extend([
- '-DDEAL_II_WITH_{library}:BOOL=OFF'.format(library=library.upper())
+ '-DDEAL_II_WITH_%s:BOOL=OFF' % library.upper()
])
# doxygen
options.extend([
- '-DDEAL_II_COMPONENT_DOCUMENTATION=%s' % ('ON' if '+doc' in spec else 'OFF'),
+ '-DDEAL_II_COMPONENT_DOCUMENTATION=%s' %
+ ('ON' if '+doc' in spec else 'OFF'),
])
-
# arpack
if '+arpack' in spec:
options.extend([
@@ -157,13 +205,25 @@ class Dealii(Package):
# since Netcdf is spread among two, need to do it by hand:
if '+netcdf' in spec:
+ # take care of lib64 vs lib install locations:
+ if os.path.isdir(spec['netcdf-cxx'].prefix.lib):
+ netcdfcxx_lib_dir = spec['netcdf-cxx'].prefix.lib
+ else:
+ netcdfcxx_lib_dir = spec['netcdf-cxx'].prefix.lib64
+ if os.path.isdir(spec['netcdf'].prefix.lib):
+ netcdf_lib_dir = spec['netcdf'].prefix.lib
+ else:
+ netcdf_lib_dir = spec['netcdf'].prefix.lib64
+
options.extend([
'-DNETCDF_FOUND=true',
- '-DNETCDF_LIBRARIES=%s;%s' %
- (join_path(spec['netcdf-cxx'].prefix.lib,'libnetcdf_c++.%s' % dsuf),
- join_path(spec['netcdf'].prefix.lib,'libnetcdf.%s' % dsuf)),
- '-DNETCDF_INCLUDE_DIRS=%s;%s' %
- (spec['netcdf-cxx'].prefix.include,
+ '-DNETCDF_LIBRARIES=%s;%s' % (
+ join_path(netcdfcxx_lib_dir,
+ 'libnetcdf_c++.%s' % dso_suffix),
+ join_path(netcdf_lib_dir,
+ 'libnetcdf.%s' % dso_suffix)),
+ '-DNETCDF_INCLUDE_DIRS=%s;%s' % (
+ spec['netcdf-cxx'].prefix.include,
spec['netcdf'].prefix.include),
])
else:
@@ -182,101 +242,12 @@ class Dealii(Package):
'-DDEAL_II_WITH_OPENCASCADE=OFF'
])
- cmake('.', *options)
-
- make()
- make("test")
- make("install")
-
- # run some MPI examples with different solvers from PETSc and Trilinos
- env['DEAL_II_DIR'] = prefix
- print('=====================================')
- print('============ EXAMPLES ===============')
- print('=====================================')
- # take bare-bones step-3
- print('=====================================')
- print('============ Step-3 =================')
- print('=====================================')
- with working_dir('examples/step-3'):
- cmake('.')
- make('release')
- make('run',parallel=False)
-
- # An example which uses Metis + PETSc
- # FIXME: switch step-18 to MPI
- with working_dir('examples/step-18'):
- print('=====================================')
- print('============= Step-18 ===============')
- print('=====================================')
- # list the number of cycles to speed up
- filter_file(r'(end_time = 10;)', ('end_time = 3;'), 'step-18.cc')
- if '^petsc' in spec and '^metis' in spec:
- cmake('.')
- make('release')
- make('run',parallel=False)
-
- # take step-40 which can use both PETSc and Trilinos
- # FIXME: switch step-40 to MPI run
- with working_dir('examples/step-40'):
- print('=====================================')
- print('========== Step-40 PETSc ============')
- print('=====================================')
- # list the number of cycles to speed up
- filter_file(r'(const unsigned int n_cycles = 8;)', ('const unsigned int n_cycles = 2;'), 'step-40.cc')
- cmake('.')
- if '^petsc' in spec:
- make('release')
- make('run',parallel=False)
-
- print('=====================================')
- print('========= Step-40 Trilinos ==========')
- print('=====================================')
- # change Linear Algebra to Trilinos
- filter_file(r'(\/\/ #define FORCE_USE_OF_TRILINOS.*)', ('#define FORCE_USE_OF_TRILINOS'), 'step-40.cc')
- if '^trilinos+hypre' in spec:
- make('release')
- make('run',parallel=False)
-
- print('=====================================')
- print('=== Step-40 Trilinos SuperluDist ====')
- print('=====================================')
- # change to direct solvers
- filter_file(r'(LA::SolverCG solver\(solver_control\);)', ('TrilinosWrappers::SolverDirect::AdditionalData data(false,"Amesos_Superludist"); TrilinosWrappers::SolverDirect solver(solver_control,data);'), 'step-40.cc')
- filter_file(r'(LA::MPI::PreconditionAMG preconditioner;)', (''), 'step-40.cc')
- filter_file(r'(LA::MPI::PreconditionAMG::AdditionalData data;)', (''), 'step-40.cc')
- filter_file(r'(preconditioner.initialize\(system_matrix, data\);)', (''), 'step-40.cc')
- filter_file(r'(solver\.solve \(system_matrix, completely_distributed_solution, system_rhs,)', ('solver.solve (system_matrix, completely_distributed_solution, system_rhs);'), 'step-40.cc')
- filter_file(r'(preconditioner\);)', (''), 'step-40.cc')
- if '^trilinos+superlu-dist' in spec:
- make('release')
- make('run',paralle=False)
-
- print('=====================================')
- print('====== Step-40 Trilinos MUMPS =======')
- print('=====================================')
- # switch to Mumps
- filter_file(r'(Amesos_Superludist)', ('Amesos_Mumps'), 'step-40.cc')
- if '^trilinos+mumps' in spec:
- make('release')
- make('run',parallel=False)
-
- print('=====================================')
- print('============ Step-36 ================')
- print('=====================================')
- with working_dir('examples/step-36'):
- if 'slepc' in spec:
- cmake('.')
- make('release')
- make('run',parallel=False)
+ # 64 bit indices
+ options.extend([
+ '-DDEAL_II_WITH_64BIT_INDICES=%s' % ('+int64' in spec)
+ ])
- print('=====================================')
- print('============ Step-54 ================')
- print('=====================================')
- with working_dir('examples/step-54'):
- if 'oce' in spec:
- cmake('.')
- make('release')
- make('run',parallel=False)
+ return options
def setup_environment(self, spack_env, env):
env.set('DEAL_II_DIR', self.prefix)
diff --git a/var/spack/repos/builtin/packages/dia/package.py b/var/spack/repos/builtin/packages/dia/package.py
index 38742b7e7d..94cd75656d 100644
--- a/var/spack/repos/builtin/packages/dia/package.py
+++ b/var/spack/repos/builtin/packages/dia/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Dia(Package):
"""Dia is a program for drawing structured diagrams."""
homepage = 'https://wiki.gnome.org/Apps/Dia'
@@ -31,10 +32,10 @@ class Dia(Package):
version('0.97.3', '0e744a0f6a6c4cb6a089e4d955392c3c')
- depends_on('intltool')
+ depends_on('intltool', type='build')
depends_on('gtkplus@2.6.0:')
depends_on('cairo')
- #depends_on('libart') # optional dependency, not yet supported by spack.
+ # depends_on('libart') # optional dependency, not yet supported by spack.
depends_on('libpng')
depends_on('libxslt')
depends_on('python')
diff --git a/var/spack/repos/builtin/packages/dmxproto/package.py b/var/spack/repos/builtin/packages/dmxproto/package.py
new file mode 100644
index 0000000000..34213bba5f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dmxproto/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dmxproto(Package):
+ """Distributed Multihead X (DMX) Extension.
+
+ This extension defines a protocol for clients to access a front-end proxy
+ X server that controls multiple back-end X servers making up a large
+ display."""
+
+ homepage = "http://dmx.sourceforge.net/"
+ url = "https://www.x.org/archive/individual/proto/dmxproto-2.3.1.tar.gz"
+
+ version('2.3.1', '7c52af95aac192e8de31bd9a588ce121')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/docbook-xml/package.py b/var/spack/repos/builtin/packages/docbook-xml/package.py
index 9c22174610..f1e1a08c8f 100644
--- a/var/spack/repos/builtin/packages/docbook-xml/package.py
+++ b/var/spack/repos/builtin/packages/docbook-xml/package.py
@@ -23,7 +23,6 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
import os
-import glob
from spack import *
@@ -35,9 +34,18 @@ class DocbookXml(Package):
version('4.5', '03083e288e87a7e829e437358da7ef9e')
def install(self, spec, prefix):
- cp = which('cp')
+ for item in os.listdir('.'):
+ src = os.path.abspath(item)
+ dst = os.path.join(prefix, item)
+ if os.path.isdir(item):
+ install_tree(src, dst, symlinks=True)
+ else:
+ install(src, dst)
- install_args = ['-a', '-t', prefix]
- install_args.extend(glob.glob('*'))
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ catalog = os.path.join(self.spec.prefix, 'catalog.xml')
+ spack_env.set('XML_CATALOG_FILES', catalog, separator=' ')
- cp(*install_args)
+ def setup_environment(self, spack_env, run_env):
+ catalog = os.path.join(self.spec.prefix, 'catalog.xml')
+ run_env.set('XML_CATALOG_FILES', catalog, separator=' ')
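The hunk above sets XML_CATALOG_FILES with separator=' ', so several packages can each contribute their own catalog file. A minimal standalone sketch of the resulting value, using hypothetical install prefixes (not taken from this diff):

import os

# Hypothetical prefixes; each DocBook package contributes one catalog file.
catalogs = [
    '/opt/spack/docbook-xml/catalog.xml',
    '/opt/spack/docbook-xsl/catalog.xml',
]
# XML_CATALOG_FILES is conventionally a space-separated list, matching
# separator=' ' in the package above.
os.environ['XML_CATALOG_FILES'] = ' '.join(catalogs)
print(os.environ['XML_CATALOG_FILES'])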
diff --git a/var/spack/repos/builtin/packages/docbook-xsl/package.py b/var/spack/repos/builtin/packages/docbook-xsl/package.py
new file mode 100644
index 0000000000..5de9cecdbb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/docbook-xsl/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+from spack import *
+
+
+class DocbookXsl(Package):
+ """Docbook XSL vocabulary."""
+ homepage = "http://docbook.sourceforge.net/"
+ url = "https://downloads.sourceforge.net/project/docbook/docbook-xsl/1.79.1/docbook-xsl-1.79.1.tar.bz2"
+
+ version('1.79.1', 'b48cbf929a2ad85e6672f710777ca7bc')
+
+ depends_on('docbook-xml')
+
+ def install(self, spec, prefix):
+ for item in os.listdir('.'):
+ src = os.path.abspath(item)
+ dst = os.path.join(prefix, item)
+ if os.path.isdir(item):
+ install_tree(src, dst, symlinks=True)
+ else:
+ install(src, dst)
+
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ catalog = os.path.join(self.spec.prefix, 'catalog.xml')
+ spack_env.set('XML_CATALOG_FILES', catalog, separator=' ')
+
+ def setup_environment(self, spack_env, run_env):
+ catalog = os.path.join(self.spec.prefix, 'catalog.xml')
+ run_env.set('XML_CATALOG_FILES', catalog, separator=' ')
diff --git a/var/spack/repos/builtin/packages/doxygen/package.py b/var/spack/repos/builtin/packages/doxygen/package.py
index b2e9582b5a..560e6aa95f 100644
--- a/var/spack/repos/builtin/packages/doxygen/package.py
+++ b/var/spack/repos/builtin/packages/doxygen/package.py
@@ -35,18 +35,20 @@ class Doxygen(Package):
homepage = "http://www.stack.nl/~dimitri/doxygen/"
url = "http://ftp.stack.nl/pub/users/dimitri/doxygen-1.8.10.src.tar.gz"
+ version('1.8.12', '08e0f7850c4d22cb5188da226b209a96')
version('1.8.11', 'f4697a444feaed739cfa2f0644abc19b')
version('1.8.10', '79767ccd986f12a0f949015efb5f058f')
# graphviz appears to be a run-time optional dependency
- variant('graphviz', default=True, description='Build with dot command support from Graphviz.') # NOQA: ignore=E501
+ variant('graphviz', default=False,
+ description='Build with dot command support from Graphviz.')
- depends_on("cmake@2.8.12:")
- depends_on("flex")
- depends_on("bison")
+ depends_on("cmake@2.8.12:", type='build')
+ depends_on("flex", type='build')
+ depends_on("bison", type='build')
# optional dependencies
- depends_on("graphviz", when="+graphviz")
+ depends_on("graphviz", when="+graphviz", type='run')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
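The doxygen change above is mostly about dependency types: cmake, flex and bison are needed only while building, while graphviz is an optional run-time tool gated by a variant. A hedged sketch of the same pattern in a hypothetical package (names and checksum are placeholders, not from this diff):

from spack import *


class Example(Package):
    """Hypothetical package illustrating build-only versus run-time
    dependencies, as in the doxygen hunk above."""

    homepage = "https://example.org"
    url = "https://example.org/example-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder checksum

    variant('graphviz', default=False,
            description='Enable dot command support at run time')

    depends_on('cmake@2.8.12:', type='build')   # only needed while building
    depends_on('graphviz', when='+graphviz', type='run')  # only needed at run time

    def install(self, spec, prefix):
        cmake('.', *std_cmake_args)
        make()
        make('install')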
diff --git a/var/spack/repos/builtin/packages/dri2proto/package.py b/var/spack/repos/builtin/packages/dri2proto/package.py
index 25ea783c0c..d05e7ea231 100644
--- a/var/spack/repos/builtin/packages/dri2proto/package.py
+++ b/var/spack/repos/builtin/packages/dri2proto/package.py
@@ -24,15 +24,23 @@
##############################################################################
from spack import *
+
class Dri2proto(Package):
- """DRI2 Protocol Headers."""
- homepage = "http://http://cgit.freedesktop.org/xorg/proto/dri2proto/"
- url = "http://xorg.freedesktop.org/releases/individual/proto/dri2proto-2.8.tar.gz"
+ """Direct Rendering Infrastructure 2 Extension.
+
+ This extension defines a protocol to securely allow user applications to
+ access the video hardware without requiring data to be passed through the
+ X server."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/dri2proto/"
+ url = "https://www.x.org/releases/individual/proto/dri2proto-2.8.tar.gz"
version('2.8', '19ea18f63d8ae8053c9fa84b60365b77')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
- make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/dri3proto/package.py b/var/spack/repos/builtin/packages/dri3proto/package.py
new file mode 100644
index 0000000000..cd2594d5da
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dri3proto/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Dri3proto(Package):
+ """Direct Rendering Infrastructure 3 Extension.
+
+ This extension defines a protocol to securely allow user applications to
+ access the video hardware without requiring data to be passed through the
+ X server."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/dri3proto/"
+ url = "https://www.x.org/releases/individual/proto/dri3proto-1.0.tar.gz"
+
+ version('1.0', '25e84a49a076862277ee12aebd49ff5f')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/dtcmp/package.py b/var/spack/repos/builtin/packages/dtcmp/package.py
index b50b2ae3ae..e59e246d47 100644
--- a/var/spack/repos/builtin/packages/dtcmp/package.py
+++ b/var/spack/repos/builtin/packages/dtcmp/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Dtcmp(Package):
"""The Datatype Comparison Library provides comparison operations and
parallel sort algorithms for MPI applications."""
diff --git a/var/spack/repos/builtin/packages/dyninst/package.py b/var/spack/repos/builtin/packages/dyninst/package.py
index 735936f087..420ab0fc68 100644
--- a/var/spack/repos/builtin/packages/dyninst/package.py
+++ b/var/spack/repos/builtin/packages/dyninst/package.py
@@ -24,13 +24,24 @@
##############################################################################
from spack import *
+
class Dyninst(Package):
"""API for dynamic binary instrumentation. Modify programs while they
are executing without recompiling, re-linking, or re-executing."""
+
homepage = "https://paradyn.org"
- url = "http://www.dyninst.org/sites/default/files/downloads/dyninst/8.1.2/DyninstAPI-8.1.2.tgz"
+ url = "https://github.com/dyninst/dyninst/archive/v9.2.0.tar.gz"
list_url = "http://www.dyninst.org/downloads/dyninst-8.x"
+ # version 9.2.1b was the latest git commit when trying to port to a
+ # ppc64le system to get fixes in computeAddrWidth independent of
+ # endianness. This version can be removed if the next release includes
+ # this change. The actual commit was
+ # b8596ad4023ec40ac07e669ff8ea3ec06e262703
+ version('9.2.1b', git='https://github.com/dyninst/dyninst.git',
+ commit='859cb778e20b619443c943c96dd1851da763142b')
+ version('9.2.0', 'ad023f85e8e57837ed9de073b59d6bab',
+ url="https://github.com/dyninst/dyninst/archive/v9.2.0.tar.gz")
version('9.1.0', '5c64b77521457199db44bec82e4988ac',
url="http://www.paradyn.org/release9.1.0/DyninstAPI-9.1.0.tgz")
version('8.2.1', 'abf60b7faabe7a2e4b54395757be39c7',
@@ -40,30 +51,47 @@ class Dyninst(Package):
version('8.1.1', 'd1a04e995b7aa70960cd1d1fac8bd6ac',
url="http://www.paradyn.org/release8.1/DyninstAPI-8.1.1.tgz")
+ variant('stat_dysect', default=False,
+ description="patch for STAT's DySectAPI")
+
depends_on("libelf")
depends_on("libdwarf")
depends_on("boost@1.42:")
+ depends_on('cmake', type='build')
+
+ patch('stat_dysect.patch', when='+stat_dysect')
+ patch('stackanalysis_h.patch', when='@9.2.0')
# new version uses cmake
def install(self, spec, prefix):
+ if spec.satisfies('@:8.1'):
+ configure("--prefix=" + prefix)
+ make()
+ make("install")
+ return
+
libelf = spec['libelf'].prefix
libdwarf = spec['libdwarf'].prefix
with working_dir('spack-build', create=True):
- cmake('..',
- '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include,
- '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib,
- '-DBoost_NO_SYSTEM_PATHS=TRUE',
- '-DLIBELF_INCLUDE_DIR=%s' % join_path(libelf.include, 'libelf'),
- '-DLIBELF_LIBRARIES=%s' % join_path(libelf.lib, 'libelf.so'),
- '-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include,
- '-DLIBDWARF_LIBRARIES=%s' % join_path(libdwarf.lib, 'libdwarf.so'),
- *std_cmake_args)
-
+ args = ['..',
+ '-DBoost_INCLUDE_DIR=%s' % spec['boost'].prefix.include,
+ '-DBoost_LIBRARY_DIR=%s' % spec['boost'].prefix.lib,
+ '-DBoost_NO_SYSTEM_PATHS=TRUE',
+ '-DLIBELF_INCLUDE_DIR=%s' % join_path(
+ libelf.include, 'libelf'),
+ '-DLIBELF_LIBRARIES=%s' % join_path(
+ libelf.lib, 'libelf.so'),
+ '-DLIBDWARF_INCLUDE_DIR=%s' % libdwarf.include,
+ '-DLIBDWARF_LIBRARIES=%s' % join_path(
+ libdwarf.lib, 'libdwarf.so')]
+ if spec.satisfies('arch=linux-redhat7-ppc64le'):
+ args.append('-Darch_ppc64_little_endian=1')
+ args += std_cmake_args
+ cmake(*args)
make()
make("install")
-
@when('@:8.1')
def install(self, spec, prefix):
configure("--prefix=" + prefix)
diff --git a/var/spack/repos/builtin/packages/dyninst/stackanalysis_h.patch b/var/spack/repos/builtin/packages/dyninst/stackanalysis_h.patch
new file mode 100644
index 0000000000..2c04d935d9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dyninst/stackanalysis_h.patch
@@ -0,0 +1,11 @@
+--- a/dataflowAPI/h/stackanalysis.h 2016-06-29 14:54:14.000000000 -0700
++++ b/dataflowAPI/h/stackanalysis.h 2016-08-02 09:50:13.619079000 -0700
+@@ -331,7 +331,7 @@
+
+ // To build intervals, we must replay the effect of each instruction.
+ // To avoid sucking enormous time, we keep those transfer functions around...
+- typedef std::map<ParseAPI::Block *, std::map<Offset, TransferFuncs>>
++ typedef std::map<ParseAPI::Block *, std::map<Offset, TransferFuncs> >
+ InstructionEffects;
+
+ DATAFLOW_EXPORT StackAnalysis();
diff --git a/var/spack/repos/builtin/packages/dyninst/stat_dysect.patch b/var/spack/repos/builtin/packages/dyninst/stat_dysect.patch
new file mode 100644
index 0000000000..c15403683f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/dyninst/stat_dysect.patch
@@ -0,0 +1,96 @@
+From 3aebb41ce0ea5b578a1ebf6810446c660066c525 Mon Sep 17 00:00:00 2001
+From: Jesper Puge Nielsen <nielsen34@llnl.gov>
+Date: Wed, 12 Aug 2015 21:07:52 -0700
+Subject: [PATCH] =?UTF-8?q?Exposed=20stackwalker=20and=20proc=20callback=20status=20to=20DySect
+=20=C3c?=
+MIME-Version: 1.0
+Content-Type: text/plain; charset=UTF-8
+Content-Transfer-Encoding: 8bit
+
+---
+ dyninstAPI/h/BPatch_process.h | 13 +++++++++++++
+ dyninstAPI/src/BPatch_process.C | 18 ++++++++++++++++++
+ dyninstAPI/src/dynProcess.h | 3 ++-
+ 3 files changed, 33 insertions(+), 1 deletions(-)
+
+diff --git a/dyninstAPI/h/BPatch_process.h b/dyninstAPI/h/BPatch_process.h
+index 5e01bbb..1316bb2 100644
+--- a/dyninstAPI/h/BPatch_process.h
++++ b/dyninstAPI/h/BPatch_process.h
+@@ -225,6 +225,10 @@ class BPATCH_DLL_EXPORT BPatch_process : public BPatch_addressSpace {
+ //
+ // this function should go away as soon as Paradyn links against Dyninst
+ PCProcess *lowlevel_process() const { return llproc; }
++
++ // Expose walker from Dyninst process
++ void *get_walker() const;
++
+ // These internal funcs trigger callbacks registered to matching events
+ bool triggerStopThread(instPoint *intPoint, func_instance *intFunc,
+ int cb_ID, void *retVal);
+@@ -281,6 +285,15 @@ class BPATCH_DLL_EXPORT BPatch_process : public BPatch_addressSpace {
+
+ bool continueExecution();
+
++ // BPatch_process::keepStopped
++ //
++ // Changes the desired process state to prevent
++ // Dyninst from resuming the process after
++ // handling the current event.
++ // Must be called from an event handler.
++
++ void keepStopped();
++
+ // BPatch_process::terminateExecution
+ //
+ // Terminate mutatee process
+diff --git a/dyninstAPI/src/BPatch_process.C b/dyninstAPI/src/BPatch_process.C
+index 115f215..809e797 100644
+--- a/dyninstAPI/src/BPatch_process.C
++++ b/dyninstAPI/src/BPatch_process.C
+@@ -507,6 +507,19 @@ bool BPatch_process::continueExecution()
+ }
+
+ /*
++ * BPatch_process::keepStopped
++ *
++ * Changes the desired process state to prevent
++ * Dyninst from resuming the process after
++ * handling the current event.
++ * Must be called from an event handler.
++ */
++void BPatch_process::keepStopped()
++{
++ llproc->setDesiredProcessState(PCProcess::ps_stopped);
++}
++
++/*
+ * BPatch_process::terminateExecution
+ *
+ * Kill the thread.
+@@ -1754,3 +1767,8 @@ bool BPatch_process::protectAnalyzedCode()
+ }
+ return ret;
+ }
++
++void *BPatch_process::get_walker() const
++{
++ return llproc->get_walker();
++}
+diff --git a/dyninstAPI/src/dynProcess.h b/dyninstAPI/src/dynProcess.h
+index 54b0c6e..00721d1 100644
+--- a/dyninstAPI/src/dynProcess.h
++++ b/dyninstAPI/src/dynProcess.h
+@@ -302,7 +302,8 @@ public:
+ // Stackwalking internals
+ bool walkStack(pdvector<Frame> &stackWalk, PCThread *thread);
+ bool getActiveFrame(Frame &frame, PCThread *thread);
+-
++ Dyninst::Stackwalker::Walker *get_walker() { return stackwalker_; }
++
+ void addSignalHandler(Address, unsigned);
+ bool isInSignalHandler(Address addr);
+
+--
+1.7.1
+
diff --git a/var/spack/repos/builtin/packages/editres/package.py b/var/spack/repos/builtin/packages/editres/package.py
new file mode 100644
index 0000000000..52ad33b133
--- /dev/null
+++ b/var/spack/repos/builtin/packages/editres/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Editres(Package):
+ """Dynamic resource editor for X Toolkit applications."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/editres"
+ url = "https://www.x.org/archive/individual/app/editres-1.0.6.tar.gz"
+
+ version('1.0.6', '310c504347ca499874593ac96e935353')
+
+ depends_on('libxaw')
+ depends_on('libx11')
+ depends_on('libxt')
+ depends_on('libxmu')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/eigen/package.py b/var/spack/repos/builtin/packages/eigen/package.py
index e20fcffdd7..1d7df47b21 100644
--- a/var/spack/repos/builtin/packages/eigen/package.py
+++ b/var/spack/repos/builtin/packages/eigen/package.py
@@ -22,36 +22,42 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
class Eigen(Package):
- """
- Eigen is a C++ template library for linear algebra: matrices, vectors, numerical solvers, and related algorithms
+ """Eigen is a C++ template library for linear algebra matrices,
+ vectors, numerical solvers, and related algorithms.
"""
homepage = 'http://eigen.tuxfamily.org/'
- url = 'http://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2'
+ url = 'https://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2'
- version('3.2.7', 'cc1bacbad97558b97da6b77c9644f184', url='http://bitbucket.org/eigen/eigen/get/3.2.7.tar.bz2')
+ version('3.3.1', 'edb6799ef413b0868aace20d2403864c')
+ version('3.2.10', 'a85bb68c82988648c3d53ba9768d7dcbcfe105f8')
+ version('3.2.9', '59ab81212f8eb2534b1545a9b42c38bf618a0d71')
+ version('3.2.8', '64f4aef8012a424c7e079eaf0be71793ab9bc6e0')
+ version('3.2.7', 'cc1bacbad97558b97da6b77c9644f184')
- variant('debug', default=False, description='Builds the library in debug mode')
+ variant('debug', default=False,
+ description='Builds the library in debug mode')
variant('metis', default=True, description='Enables metis backend')
variant('scotch', default=True, description='Enables scotch backend')
variant('fftw', default=True, description='Enables FFTW backend')
- variant('suitesparse', default=True, description='Enables SuiteSparse support')
+ variant('suitesparse', default=True,
+ description='Enables SuiteSparse support')
+ variant('mpfr', default=True,
+ description='Enables support for multi-precisions FP via mpfr')
# TODO : dependency on googlehash, superlu, adolc missing
-
- depends_on('cmake')
+ depends_on('cmake', type='build')
depends_on('metis@5:', when='+metis')
depends_on('scotch', when='+scotch')
depends_on('fftw', when='+fftw')
depends_on('suite-sparse', when='+suitesparse')
- depends_on('mpfr@2.3.0:') # Eigen 3.2.7 requires at least 2.3.0
- depends_on('gmp')
+ depends_on('mpfr@2.3.0:', when="+mpfr")
+ depends_on('gmp', when="+mpfr")
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/elfutils/package.py b/var/spack/repos/builtin/packages/elfutils/package.py
index ecb5759ddc..ef8c2433c9 100644
--- a/var/spack/repos/builtin/packages/elfutils/package.py
+++ b/var/spack/repos/builtin/packages/elfutils/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Elfutils(Package):
"""elfutils is a collection of various binary tools such as
eu-objdump, eu-readelf, and other utilities that allow you to
@@ -47,4 +48,3 @@ class Elfutils(Package):
configure('--prefix=%s' % prefix, '--enable-maintainer-mode')
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/elk/package.py b/var/spack/repos/builtin/packages/elk/package.py
index b089e585dd..acaf863935 100644
--- a/var/spack/repos/builtin/packages/elk/package.py
+++ b/var/spack/repos/builtin/packages/elk/package.py
@@ -87,12 +87,12 @@ class Elk(Package):
# BLAS/LAPACK support
# Note: BLAS/LAPACK must be compiled with OpenMP support
# if the +openmp variant is chosen
- blas = 'blas.a'
+ blas = 'blas.a'
lapack = 'lapack.a'
if '+blas' in spec:
- blas = spec['blas'].blas_shared_lib
+ blas = spec['blas'].blas_libs.joined()
if '+lapack' in spec:
- lapack = spec['lapack'].lapack_shared_lib
+ lapack = spec['lapack'].lapack_libs.joined()
# lapack must come before blas
config['LIB_LPK'] = ' '.join([lapack, blas])
diff --git a/var/spack/repos/builtin/packages/elpa/package.py b/var/spack/repos/builtin/packages/elpa/package.py
index ae81422495..b433bf40b0 100644
--- a/var/spack/repos/builtin/packages/elpa/package.py
+++ b/var/spack/repos/builtin/packages/elpa/package.py
@@ -34,7 +34,16 @@ class Elpa(Package):
homepage = 'http://elpa.mpcdf.mpg.de/'
url = 'http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
- version('2015.11.001', 'de0f35b7ee7c971fd0dca35c900b87e6', url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz')
+ version(
+ '2016.05.003',
+ '88a9f3f3bfb63e16509dd1be089dcf2c',
+ url='http://elpa.mpcdf.mpg.de/html/Releases/2016.05.003/elpa-2016.05.003.tar.gz'
+ )
+ version(
+ '2015.11.001',
+ 'de0f35b7ee7c971fd0dca35c900b87e6',
+ url='http://elpa.mpcdf.mpg.de/elpa-2015.11.001.tar.gz'
+ )
variant('openmp', default=False, description='Activates OpenMP support')
@@ -45,7 +54,24 @@ class Elpa(Package):
def install(self, spec, prefix):
- options = ["--prefix=%s" % prefix]
+ options = [
+ 'CC={0}'.format(self.spec['mpi'].mpicc),
+ 'FC={0}'.format(self.spec['mpi'].mpifc),
+ 'CXX={0}'.format(self.spec['mpi'].mpicxx),
+ 'FCFLAGS={0}'.format(
+ spec['lapack'].lapack_libs.joined()
+ ),
+ 'LDFLAGS={0}'.format(
+ spec['lapack'].lapack_libs.joined()
+ ),
+ 'SCALAPACK_FCFLAGS={0}'.format(
+ spec['scalapack'].scalapack_libs.joined()
+ ),
+ 'SCALAPACK_LDFLAGS={0}'.format(
+ spec['scalapack'].scalapack_libs.joined()
+ ),
+ '--prefix={0}'.format(self.prefix)
+ ]
if '+openmp' in spec:
options.append("--enable-openmp")
diff --git a/var/spack/repos/builtin/packages/emacs/package.py b/var/spack/repos/builtin/packages/emacs/package.py
index 4b05864a1e..1f9caee24c 100644
--- a/var/spack/repos/builtin/packages/emacs/package.py
+++ b/var/spack/repos/builtin/packages/emacs/package.py
@@ -24,22 +24,44 @@
##############################################################################
from spack import *
+
class Emacs(Package):
"""The Emacs programmable text editor."""
+
homepage = "https://www.gnu.org/software/emacs"
url = "http://ftp.gnu.org/gnu/emacs/emacs-24.5.tar.gz"
+ version('25.1', '95c12e6a9afdf0dcbdd7d2efa26ca42c')
version('24.5', 'd74b597503a68105e61b5b9f6d065b44')
+ variant('X', default=False, description="Enable an X toolkit")
+ variant('toolkit', default='gtk',
+ description="Select an X toolkit (gtk, athena)")
+
depends_on('ncurses')
- # Emacs also depends on:
- # GTK or other widget library
- # libtiff, png, etc.
- # For now, we assume the system provides all that stuff.
- # For Ubuntu 14.04 LTS:
- # sudo apt-get install libgtk-3-dev libxpm-dev libtiff5-dev libjpeg8-dev libgif-dev libpng12-dev
+ depends_on('libtiff', when='+X')
+ depends_on('libpng', when='+X')
+ depends_on('libxpm', when='+X')
+ depends_on('giflib', when='+X')
+ depends_on('libx11', when='+X')
+ depends_on('libxaw', when='+X toolkit=athena')
+ depends_on('gtkplus+X', when='+X toolkit=gtk')
def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
+ args = []
+ toolkit = spec.variants['toolkit'].value
+ if '+X' in spec:
+ if toolkit not in ('gtk', 'athena'):
+ raise InstallError("toolkit must be in (gtk, athena), not %s" %
+ toolkit)
+ args = [
+ '--with-x',
+ '--with-x-toolkit={0}'.format(toolkit)
+ ]
+ else:
+ args = ['--without-x']
+
+ configure('--prefix={0}'.format(prefix), *args)
+
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/encodings/package.py b/var/spack/repos/builtin/packages/encodings/package.py
new file mode 100644
index 0000000000..67b21a6e07
--- /dev/null
+++ b/var/spack/repos/builtin/packages/encodings/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Encodings(Package):
+ """X.org encodings font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/encodings"
+ url = "https://www.x.org/archive/individual/font/encodings-1.0.4.tar.gz"
+
+ version('1.0.4', '1a631784ce204d667abcc329b851670c')
+
+ depends_on('font-util')
+
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/environment-modules/package.py b/var/spack/repos/builtin/packages/environment-modules/package.py
index fe7a5ee4f6..11ddb12876 100644
--- a/var/spack/repos/builtin/packages/environment-modules/package.py
+++ b/var/spack/repos/builtin/packages/environment-modules/package.py
@@ -23,6 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import os
class EnvironmentModules(Package):
@@ -30,33 +31,50 @@ class EnvironmentModules(Package):
modification of a user's environment via modulefiles."""
homepage = "https://sourceforge.net/p/modules/wiki/Home/"
- url = "http://prdownloads.sourceforge.net/modules/modules-3.2.10.tar.gz"
+ url = "http://prdownloads.sourceforge.net/modules/modules-3.2.10.tar.gz"
version('3.2.10', '8b097fdcb90c514d7540bb55a3cb90fb')
# Dependencies:
- depends_on('tcl')
+ depends_on('tcl', type=('build', 'link', 'run'))
def install(self, spec, prefix):
tcl_spec = spec['tcl']
+ # We are looking for tclConfig.sh
+ tcl_config_name = 'tclConfig.sh'
+ tcl_config_dir_options = [tcl_spec.prefix.lib,
+ tcl_spec.prefix.lib64]
+
+ tcl_config_found = False
+ for tcl_config_dir in tcl_config_dir_options:
+ tcl_config_found = os.path.exists(
+ join_path(tcl_config_dir, tcl_config_name))
+ if tcl_config_found:
+ break
+
+ if not tcl_config_found:
+ raise InstallError('Failed to locate ' + tcl_config_name)
+
# See: https://sourceforge.net/p/modules/bugs/62/
- CPPFLAGS = ['-DUSE_INTERP_ERRORLINE']
+ cpp_flags = ['-DUSE_INTERP_ERRORLINE']
+
config_args = [
"--without-tclx",
"--with-tclx-ver=0.0",
- "--prefix=%s" % prefix,
- "--with-tcl=%s" % join_path(tcl_spec.prefix, 'lib'), # It looks for tclConfig.sh
- "--with-tcl-ver=%d.%d" % (tcl_spec.version.version[0], tcl_spec.version.version[1]),
+ "--prefix=" + prefix,
+ # It looks for tclConfig.sh
+ "--with-tcl=" + tcl_config_dir,
+ "--with-tcl-ver=%d.%d" % (
+ tcl_spec.version.version[0], tcl_spec.version.version[1]),
'--disable-debug',
'--disable-dependency-tracking',
'--disable-silent-rules',
- '--disable-versioning',
- '--datarootdir=%s' % prefix.share,
- 'CPPFLAGS=%s' % ' '.join(CPPFLAGS)
+ '--disable-versioning',
+ '--datarootdir=' + prefix.share,
+ 'CPPFLAGS=' + ' '.join(cpp_flags)
]
-
configure(*config_args)
make()
make('install')
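The tclConfig.sh probe above tries lib and then lib64 and aborts if neither contains the file. The same search written as a standalone Python sketch, with hypothetical Tcl prefixes:

import os

# Hypothetical Tcl install prefixes; in the package above these come from
# tcl_spec.prefix.lib and tcl_spec.prefix.lib64.
candidates = ['/opt/spack/tcl/lib', '/opt/spack/tcl/lib64']
tcl_config_dir = next(
    (d for d in candidates
     if os.path.exists(os.path.join(d, 'tclConfig.sh'))),
    None)
if tcl_config_dir is None:
    print('Failed to locate tclConfig.sh in any candidate directory')
else:
    print('tclConfig.sh found in', tcl_config_dir)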
diff --git a/var/spack/repos/builtin/packages/espresso/package.py b/var/spack/repos/builtin/packages/espresso/package.py
index ef6a3ccc7b..d2c825513c 100644
--- a/var/spack/repos/builtin/packages/espresso/package.py
+++ b/var/spack/repos/builtin/packages/espresso/package.py
@@ -23,23 +23,30 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-
import os
+
class Espresso(Package):
+ """QE is an integrated suite of Open-Source computer codes for
+ electronic-structure calculations and materials modeling at
+ the nanoscale. It is based on density-functional theory, plane
+ waves, and pseudopotentials.
"""
- QE is an integrated suite of Open-Source computer codes for electronic-structure calculations and materials
- modeling at the nanoscale. It is based on density-functional theory, plane waves, and pseudopotentials.
- """
+
homepage = 'http://quantum-espresso.org'
url = 'http://www.qe-forge.org/gf/download/frsrelease/204/912/espresso-5.3.0.tar.gz'
+ version(
+ '5.4.0',
+ '8bb78181b39bd084ae5cb7a512c1cfe7',
+ url='http://www.qe-forge.org/gf/download/frsrelease/211/968/espresso-5.4.0.tar.gz'
+ )
version('5.3.0', '6848fcfaeb118587d6be36bd10b7f2c3')
- variant('mpi', default=True, description='Build Quantum-ESPRESSO with mpi support')
+ variant('mpi', default=True, description='Builds with mpi support')
variant('openmp', default=False, description='Enables openMP support')
variant('scalapack', default=True, description='Enables scalapack support')
- variant('elpa', default=True, description='Use elpa as an eigenvalue solver')
+ variant('elpa', default=True, description='Uses elpa as an eigenvalue solver')
depends_on('blas')
depends_on('lapack')
@@ -47,7 +54,12 @@ class Espresso(Package):
depends_on('mpi', when='+mpi')
depends_on('fftw~mpi', when='~mpi')
depends_on('fftw+mpi', when='+mpi')
- depends_on('scalapack', when='+scalapack+mpi') # TODO : + mpi needed to avoid false dependencies installation
+ # TODO : +mpi is needed here to avoid installing spurious dependencies
+ depends_on('scalapack', when='+scalapack+mpi')
+
+ # The Makefile generated by qe's configure has spurious problems
+ # when run in parallel, so parallel builds are disabled below
+ parallel = False
def check_variants(self, spec):
error = 'you cannot ask for \'+{variant}\' when \'+mpi\' is not active'
@@ -76,7 +88,7 @@ class Espresso(Package):
# Add a list of directories to search
search_list = []
- for name, dependency_spec in spec.dependencies.iteritems():
+ for dependency_spec in spec.dependencies():
search_list.extend([dependency_spec.prefix.lib,
dependency_spec.prefix.lib64])
@@ -87,10 +99,9 @@ class Espresso(Package):
configure(*options)
make('all')
- if spec.architecture.startswith('darwin'):
+ if spec.satisfies('platform=darwin'):
mkdirp(prefix.bin)
for filename in glob("bin/*.x"):
install(filename, prefix.bin)
else:
make('install')
-
diff --git a/var/spack/repos/builtin/packages/espressopp/package.py b/var/spack/repos/builtin/packages/espressopp/package.py
new file mode 100644
index 0000000000..2903a02f7d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/espressopp/package.py
@@ -0,0 +1,82 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Espressopp(CMakePackage):
+ """ESPResSo++ is an extensible, flexible, fast and parallel simulation
+ software for soft matter research. It is a highly versatile software
+ package for the scientific simulation and analysis of coarse-grained
+ atomistic or bead-spring models as they are used in soft matter research.
+ """
+ homepage = "https://espressopp.github.io"
+ url = "https://github.com/espressopp/espressopp/tarball/v1.9.4.1"
+
+ version('develop', git='https://github.com/espressopp/espressopp.git', branch='master')
+ version('1.9.4.1', '0da74a6d4e1bfa6a2a24fca354245a4f')
+ version('1.9.4', 'f2a27993a83547ad014335006eea74ea')
+
+ variant('debug', default=False, description='Build debug version')
+ variant('ug', default=False, description='Build user guide')
+ variant('pdf', default=False, description='Build user guide in pdf format')
+ variant('dg', default=False, description='Build developer guide')
+
+ depends_on("cmake@2.8:", type='build')
+ depends_on("mpi")
+ depends_on("boost+serialization+filesystem+system+python+mpi", when='@1.9.4:')
+ extends("python")
+ depends_on("python@2:2.7.13")
+ depends_on("py-mpi4py@2.0.0:", when='@1.9.4', type=('build', 'run'))
+ depends_on("py-mpi4py@1.3.1:", when='@1.9.4.1:', type=('build', 'run'))
+ depends_on("fftw")
+ depends_on("py-sphinx", when="+ug", type='build')
+ depends_on("py-sphinx", when="+pdf", type='build')
+ depends_on('py-numpy', when="+ug", type='build')
+ depends_on('py-numpy', when="+pdf", type='build')
+ depends_on('py-matplotlib', when="+ug", type='build')
+ depends_on('py-matplotlib', when="+pdf", type='build')
+ depends_on("texlive", when="+pdf", type='build')
+ depends_on("doxygen", when="+dg", type='build')
+
+ def build_type(self):
+ spec = self.spec
+ if '+debug' in spec:
+ return 'Debug'
+ else:
+ return 'Release'
+
+ def cmake_args(self):
+ return ['-DEXTERNAL_MPI4PY=ON', '-DEXTERNAL_BOOST=ON']
+
+ def build(self, spec, prefix):
+ with working_dir(self.build_directory()):
+ make()
+ if '+ug' in spec:
+ make("ug", parallel=False)
+ if '+pdf' in spec:
+ make("ug-pdf", parallel=False)
+ if '+dg' in spec:
+ make("doc", parallel=False)
diff --git a/var/spack/repos/builtin/packages/etsf-io/package.py b/var/spack/repos/builtin/packages/etsf-io/package.py
new file mode 100644
index 0000000000..c1e6f2eded
--- /dev/null
+++ b/var/spack/repos/builtin/packages/etsf-io/package.py
@@ -0,0 +1,67 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class EtsfIo(Package):
+ """ETSF_IO is a library implementing the Nanoquanta/ETSF file
+ format specifications.
+
+ ETSF_IO enables an architecture-independent exchange of crystallographic
+ data, electronic wavefunctions, densities and potentials, as well as
+ spectroscopic data. It is meant to be used by quantum-physical and
+ quantum-chemical applications relying upon Density Functional Theory (DFT).
+ """
+
+ homepage = "http://www.etsf.eu/resources/software/libraries_and_tools"
+ url = "https://launchpad.net/etsf-io/1.0/1.0.4/+download/etsf_io-1.0.4.tar.gz"
+
+ version('1.0.4', '32d0f7143278bd925b334c69fa425da1')
+
+ depends_on("netcdf-fortran")
+ depends_on("hdf5+mpi~cxx", when='+mpi') # required for NetCDF-4 support
+
+ def install(self, spec, prefix):
+ options = ['--prefix=%s' % prefix]
+ oapp = options.append
+
+ # Specify installation directory for Fortran module files
+ # Default is [INCLUDEDIR/FC_TYPE]
+ oapp("--with-moduledir=%s" % prefix.include)
+
+ # Netcdf4/HDF
+ hdf_libs = "-L%s -lhdf5_hl -lhdf5" % spec["hdf5"].prefix.lib
+ options.extend([
+ "--with-netcdf-incs=-I%s" % spec["netcdf-fortran"].prefix.include,
+ "--with-netcdf-libs=-L%s -lnetcdff -lnetcdf %s" % (
+ spec["netcdf-fortran"].prefix.lib, hdf_libs),
+ ])
+
+ configure(*options)
+
+ make()
+ make("check")
+ make("install")
diff --git a/var/spack/repos/builtin/packages/everytrace-example/package.py b/var/spack/repos/builtin/packages/everytrace-example/package.py
new file mode 100644
index 0000000000..76b437a38a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/everytrace-example/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class EverytraceExample(CMakePackage):
+ """Get stack trace EVERY time a program exits."""
+
+ homepage = "https://github.com/citibeth/everytrace-example"
+ version('develop',
+ git='https://github.com/citibeth/everytrace-example.git',
+ branch='develop')
+
+ depends_on('everytrace+mpi+fortran')
+
+ # Currently the only MPI implementation everytrace works with.
+ depends_on('openmpi')
+
+ def setup_environment(self, spack_env, env):
+ env.prepend_path('PATH', join_path(self.prefix, 'bin'))
diff --git a/var/spack/repos/builtin/packages/everytrace/package.py b/var/spack/repos/builtin/packages/everytrace/package.py
new file mode 100644
index 0000000000..d884c7b165
--- /dev/null
+++ b/var/spack/repos/builtin/packages/everytrace/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Everytrace(CMakePackage):
+ """Get stack trace EVERY time a program exits."""
+
+ homepage = "https://github.com/citibeth/everytrace"
+ url = "https://github.com/citibeth/everytrace/tarball/0.2.0"
+
+ version('0.2.0', '2af0e5b6255064d5191accebaa70d222')
+ version('develop',
+ git='https://github.com/citibeth/everytrace.git', branch='develop')
+
+ variant('mpi', default=True, description='Enables MPI parallelism')
+ variant('fortran', default=True,
+ description='Enable use with Fortran programs')
+
+ depends_on('mpi', when='+mpi')
+
+ def cmake_args(self):
+ spec = self.spec
+ return [
+ '-DUSE_MPI=%s' % ('YES' if '+mpi' in spec else 'NO'),
+ '-DUSE_FORTRAN=%s' % ('YES' if '+fortran' in spec else 'NO')]
+
+ def setup_environment(self, spack_env, env):
+ env.prepend_path('PATH', join_path(self.prefix, 'bin'))
diff --git a/var/spack/repos/builtin/packages/evieext/package.py b/var/spack/repos/builtin/packages/evieext/package.py
new file mode 100644
index 0000000000..afc0245f50
--- /dev/null
+++ b/var/spack/repos/builtin/packages/evieext/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Evieext(Package):
+ """Extended Visual Information Extension (XEVIE).
+
+ This extension defines a protocol for a client to determine information
+ about core X visuals beyond what the core protocol provides."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/evieproto"
+ url = "https://www.x.org/archive/individual/proto/evieext-1.1.1.tar.gz"
+
+ version('1.1.1', '018a7d24d0c7926d594246320bcb6a86')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/exodusii/cmake-exodus.patch b/var/spack/repos/builtin/packages/exodusii/cmake-exodus.patch
new file mode 100644
index 0000000000..014381de88
--- /dev/null
+++ b/var/spack/repos/builtin/packages/exodusii/cmake-exodus.patch
@@ -0,0 +1,9 @@
+diff --git a/cmake-exodus b/cmake-exodus
+index 67ccd34..9b749e3 100755
+--- a/cmake-exodus
++++ b/cmake-exodus
+@@ -1,3 +1,4 @@
++#!/bin/bash
+ EXTRA_ARGS=$@
+
+ ### The following assumes you are building in a subdirectory of ACCESS Root
diff --git a/var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch b/var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch
deleted file mode 100644
index 25355269ca..0000000000
--- a/var/spack/repos/builtin/packages/exodusii/exodus-cmake.patch
+++ /dev/null
@@ -1,12 +0,0 @@
-diff --git a/cmake-exodus b/cmake-exodus
-index 787fd9d..ed073a2 100755
---- a/cmake-exodus
-+++ b/cmake-exodus
-@@ -1,4 +1,6 @@
--EXTRA_ARGS=$@
-+#!/bin/bash
-+
-+EXTRA_ARGS=-DSEACASProj_ENABLE_CXX11=OFF
-
- ### Change this to point to the compilers you want to use
- CC=gcc
diff --git a/var/spack/repos/builtin/packages/exodusii/package.py b/var/spack/repos/builtin/packages/exodusii/package.py
index 5e9227af46..67024673b2 100644
--- a/var/spack/repos/builtin/packages/exodusii/package.py
+++ b/var/spack/repos/builtin/packages/exodusii/package.py
@@ -27,47 +27,60 @@ from spack import *
# TODO: Add support for a C++11 enabled installation that filters out the
# TODO: "C++11-Disabled" flag (but only if the spec compiler supports C++11).
-# TODO: Add support for parallel installation that uses MPI.
+# TODO: Use variant forwarding to forward the 'mpi' variant to the direct
+# TODO: dependencies 'hdf5' and 'netcdf'.
-# TODO: Create installation options for NetCDF that support larger page size
-# TODO: suggested by Exodus (see the repository "README" file).
class Exodusii(Package):
- """Exodus II is a C++/Fortran library developed to store and retrieve data for
- finite element analyses. It's used for preprocessing (problem definition),
- postprocessing (results visualization), and data transfer between codes.
- An Exodus II data file is a random access, machine independent, binary
- file that is written and read via C, C++, or Fortran API routines."""
+ """Exodus II is a C++/Fortran library developed to store and retrieve
+ data for finite element analyses. It's used for preprocessing
+ (problem definition), postprocessing (results visualization), and
+ data transfer between codes. An Exodus II data file is a random
+ access, machine independent, binary file that is written and read
+ via C, C++, or Fortran API routines.
+
+ """
homepage = "https://github.com/gsjaardema/seacas"
url = "https://github.com/gsjaardema/seacas/archive/master.zip"
- version('2016-02-08', git='https://github.com/gsjaardema/seacas.git', commit='dcf3529')
+ version('2016-08-09', git='https://github.com/gsjaardema/seacas.git', commit='2ffeb1b')
- # TODO: Make this a build dependency once build dependencies are supported
- # (see: https://github.com/LLNL/spack/pull/378).
- depends_on('cmake@2.8.7:')
- depends_on('hdf5~shared~mpi')
- depends_on('netcdf~mpi')
+ variant('mpi', default=True, description='Enables MPI parallelism.')
- patch('exodus-cmake.patch')
+ depends_on('cmake@2.8.11:', type='build')
+ depends_on('mpi', when='+mpi')
- def patch(self):
- ff = FileFilter('cmake-exodus')
+ # https://github.com/gsjaardema/seacas/blob/master/NetCDF-Mapping.md
+ depends_on('netcdf maxdims=65536 maxvars=524288')
+ depends_on('hdf5+shared')
- ff.filter('CMAKE_INSTALL_PREFIX:PATH=${ACCESS}',
- 'CMAKE_INSTALL_PREFIX:PATH=%s' % self.spec.prefix, string=True)
- ff.filter('NetCDF_DIR:PATH=${TPL}',
- 'NetCDF_DIR:PATH=%s' % self.spec['netcdf'].prefix, string=True)
- ff.filter('HDF5_ROOT:PATH=${TPL}',
- 'HDF5_ROOT:PATH=%s' % self.spec['hdf5'].prefix, string=True)
+ patch('cmake-exodus.patch')
def install(self, spec, prefix):
- mkdirp('build')
- cd('build')
+ cc_path = spec['mpi'].mpicc if '+mpi' in spec else self.compiler.cc
+ cxx_path = spec['mpi'].mpicxx if '+mpi' in spec else self.compiler.cxx
+
+ config_args = std_cmake_args[:]
+ config_args.extend([
+ # General Flags #
+ '-DSEACASProj_ENABLE_CXX11:BOOL=OFF',
+ '-DSEACASProj_ENABLE_Zoltan:BOOL=OFF',
+ '-DHDF5_ROOT:PATH={0}'.format(spec['hdf5'].prefix),
+ '-DNetCDF_DIR:PATH={0}'.format(spec['netcdf'].prefix),
+
+ # MPI Flags #
+ '-DTPL_ENABLE_MPI={0}'.format('ON' if '+mpi' in spec else 'OFF'),
+ '-DCMAKE_C_COMPILER={0}'.format(cc_path),
+ '-DCMAKE_CXX_COMPILER={0}'.format(cxx_path),
+ ])
+
+ build_directory = join_path(self.stage.source_path, 'spack-build')
+ source_directory = self.stage.source_path
- cmake_exodus = Executable('../cmake-exodus')
- cmake_exodus()
+ with working_dir(build_directory, create=True):
+ mcmake = Executable(join_path(source_directory, 'cmake-exodus'))
+ mcmake(*config_args)
- make()
- make('install')
+ make()
+ make('install')
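The rewritten exodusii install() builds out of source: it copies std_cmake_args, appends its own cache entries, and runs the generator from a separate spack-build directory. A fragment of a hypothetical install() method showing just that pattern (the HDF5 entry is illustrative):

# Fragment of a hypothetical package's install() method, assuming the Spack
# helpers visible in this diff (join_path, working_dir, std_cmake_args).
def install(self, spec, prefix):
    source_directory = self.stage.source_path
    build_directory = join_path(source_directory, 'spack-build')

    args = std_cmake_args[:]  # copy the defaults before extending them
    args.append('-DHDF5_ROOT:PATH={0}'.format(spec['hdf5'].prefix))

    with working_dir(build_directory, create=True):
        cmake(source_directory, *args)
        make()
        make('install')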
diff --git a/var/spack/repos/builtin/packages/exonerate/package.py b/var/spack/repos/builtin/packages/exonerate/package.py
new file mode 100644
index 0000000000..7921e64058
--- /dev/null
+++ b/var/spack/repos/builtin/packages/exonerate/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Exonerate(Package):
+ """Pairwise sequence alignment of DNA and proteins"""
+
+ homepage = "http://www.ebi.ac.uk/about/vertebrate-genomics/software/exonerate"
+ url = "http://ftp.ebi.ac.uk/pub/software/vertebrategenomics/exonerate/exonerate-2.2.0.tar.gz"
+
+ version('2.4.0', '126fbade003b80b663a1d530c56f1904')
+
+ depends_on('pkg-config', type="build")
+ depends_on('glib')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix), '--disable-debug',
+ '--disable-dependency-tracking')
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/expat/package.py b/var/spack/repos/builtin/packages/expat/package.py
index 51e827b2c5..13ac816ea5 100644
--- a/var/spack/repos/builtin/packages/expat/package.py
+++ b/var/spack/repos/builtin/packages/expat/package.py
@@ -24,19 +24,10 @@
##############################################################################
from spack import *
-class Expat(Package):
- """<eXpat/> is an XML parser library written in C"""
- homepage = "http://expat.sourceforge.net/"
- url = "http://downloads.sourceforge.net/project/expat/expat/2.1.0/expat-2.1.0.tar.gz"
-
- version('2.1.0', 'dd7dab7a5fea97d2a6a43f511449b7cd')
-
- depends_on('cmake')
- def install(self, spec, prefix):
-
- with working_dir('spack-build', create=True):
- cmake('..', *std_cmake_args)
- make()
- make('install')
+class Expat(AutotoolsPackage):
+ """Expat is an XML parser library written in C."""
+ homepage = "http://expat.sourceforge.net/"
+ url = "http://downloads.sourceforge.net/project/expat/expat/2.2.0/expat-2.2.0.tar.bz2"
+ version('2.2.0', '2f47841c829facb346eb6e3fab5212e2')
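expat is converted to AutotoolsPackage, so the default configure/build/install phases come from the base class and no install() is needed. A hedged sketch of a minimal subclass; the configure_args hook is assumed here for passing extra flags:

from spack import *


class Libexample(AutotoolsPackage):
    """Hypothetical minimal AutotoolsPackage: the base class runs
    configure, make and make install on its own."""

    homepage = "https://example.org/libexample"
    url = "https://example.org/libexample-1.0.tar.bz2"

    version('1.0', '00000000000000000000000000000000')  # placeholder

    def configure_args(self):
        # Assumed hook for passing extra ./configure flags.
        return ['--disable-dependency-tracking']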
diff --git a/var/spack/repos/builtin/packages/extrae/package.py b/var/spack/repos/builtin/packages/extrae/package.py
index 84c410e4ba..cb20a8109e 100644
--- a/var/spack/repos/builtin/packages/extrae/package.py
+++ b/var/spack/repos/builtin/packages/extrae/package.py
@@ -25,7 +25,23 @@
from spack import *
# typical working line with extrae 3.0.1
-# ./configure --prefix=/usr/local --with-mpi=/usr/lib64/mpi/gcc/openmpi --with-unwind=/usr/local --with-papi=/usr --with-dwarf=/usr --with-elf=/usr --with-dyninst=/usr --with-binutils=/usr --with-xml-prefix=/usr --enable-openmp --enable-nanos --enable-pthread --disable-parallel-merge LDFLAGS=-pthread
+# ./configure
+# --prefix=/usr/local
+# --with-mpi=/usr/lib64/mpi/gcc/openmpi
+# --with-unwind=/usr/local
+# --with-papi=/usr
+# --with-dwarf=/usr
+# --with-elf=/usr
+# --with-dyninst=/usr
+# --with-binutils=/usr
+# --with-xml-prefix=/usr
+# --enable-openmp
+# --enable-nanos
+# --enable-pthread
+# --disable-parallel-merge
+#
+# LDFLAGS=-pthread
+
class Extrae(Package):
"""Extrae is the package devoted to generate tracefiles which can
@@ -37,8 +53,8 @@ class Extrae(Package):
programming models either alone or in conjunction with MPI :
OpenMP, CUDA, OpenCL, pthread, OmpSs"""
homepage = "http://www.bsc.es/computer-sciences/extrae"
- url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-3.0.1.tar.bz2"
- version('3.0.1', 'a6a8ca96cd877723cd8cc5df6bdb922b')
+ url = "http://www.bsc.es/ssl/apps/performanceTools/files/extrae-3.3.0.tar.bz2"
+ version('3.3.0', 'f46e3f1a6086b5b3ac41c9585b42952d')
depends_on("mpi")
depends_on("dyninst")
@@ -46,6 +62,9 @@ class Extrae(Package):
depends_on("boost")
depends_on("libdwarf")
depends_on("papi")
+ depends_on("libelf")
+ depends_on("libxml2")
+ depends_on("binutils+libiberty")
def install(self, spec, prefix):
if 'openmpi' in spec:
@@ -55,16 +74,19 @@ class Extrae(Package):
elif 'mvapich2' in spec:
mpi = spec['mvapich2']
- configure("--prefix=%s" % prefix,
- "--with-mpi=%s" % mpi.prefix,
- "--with-unwind=%s" % spec['libunwind'].prefix,
- "--with-dyninst=%s" % spec['dyninst'].prefix,
- "--with-boost=%s" % spec['boost'].prefix,
- "--with-dwarf=%s" % spec['libdwarf'].prefix,
- "--with-papi=%s" % spec['papi'].prefix,
- "--with-dyninst-headers=%s" % spec['dyninst'].prefix.include,
- "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib)
+ configure("--prefix=%s" % prefix,
+ "--with-mpi=%s" % mpi.prefix,
+ "--with-unwind=%s" % spec['libunwind'].prefix,
+ "--with-dyninst=%s" % spec['dyninst'].prefix,
+ "--with-boost=%s" % spec['boost'].prefix,
+ "--with-dwarf=%s" % spec['libdwarf'].prefix,
+ "--with-papi=%s" % spec['papi'].prefix,
+ "--with-dyninst-headers=%s" % spec[
+ 'dyninst'].prefix.include,
+ "--with-elf=%s" % spec['libelf'].prefix,
+ "--with-xml-prefix=%s" % spec['libxml2'].prefix,
+ "--with-binutils=%s" % spec['binutils'].prefix,
+ "--with-dyninst-libs=%s" % spec['dyninst'].prefix.lib)
make()
make("install", parallel=False)
-
diff --git a/var/spack/repos/builtin/packages/exuberant-ctags/package.py b/var/spack/repos/builtin/packages/exuberant-ctags/package.py
index c49d0624f6..10be30ab8b 100644
--- a/var/spack/repos/builtin/packages/exuberant-ctags/package.py
+++ b/var/spack/repos/builtin/packages/exuberant-ctags/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class ExuberantCtags(Package):
"""The canonical ctags generator"""
homepage = "ctags.sourceforge.net"
diff --git a/var/spack/repos/builtin/packages/fastqc/fastqc.patch b/var/spack/repos/builtin/packages/fastqc/fastqc.patch
new file mode 100644
index 0000000000..a0a1155905
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fastqc/fastqc.patch
@@ -0,0 +1,30 @@
+--- fastqc.orig 2015-11-20 09:25:00.000000000 +0000
++++ fastqc 2016-10-24 19:06:16.734000000 +0000
+@@ -38,12 +38,21 @@
+ $delimiter = ';';
+ }
+
+-if ($ENV{CLASSPATH}) {
+- $ENV{CLASSPATH} .= "$delimiter$RealBin$delimiter$RealBin/sam-1.103.jar$delimiter$RealBin/jbzip2-0.9.jar$delimiter$RealBin/cisd-jhdf5.jar";
+-}
+-else {
+- $ENV{CLASSPATH} = "$RealBin$delimiter$RealBin/sam-1.103.jar$delimiter$RealBin/jbzip2-0.9.jar$delimiter$RealBin/cisd-jhdf5.jar";
+-}
++# The lib dir is $RealBin/../lib
++# start with list of jars we need and prefix them with the lib dir
++# then stick CLASSPATH onto the front (empty or otherwise...)
++# then filter out anything that's empty (perhaps CLASSPATH...)
++# then join all the remaining bits with the delimiter.
++use File::Basename;
++use File::Spec::Functions;
++my $_lib = catfile(dirname($RealBin), 'lib');
++$ENV{CLASSPATH} =
++ join($delimiter,
++ grep {$_}
++ ($ENV{CLASSPATH},
++ $_lib,
++ map {"$_lib/$_"}
++ qw(sam-1.103.jar jbzip2-0.9.jar cisd-jhdf5.jar)));
+
+ my @java_args;
+ my @files;
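
For readers not fluent in Perl, the hunk above boils down to: take the jars we need, prefix them with the lib directory next to the script, put any pre-existing CLASSPATH first, drop empty entries, and join everything with the platform delimiter. A rough standalone Python rendering of that logic (the function name is illustrative, not part of the patch):

    import os

    def build_classpath(real_bin, delimiter=':'):
        """Mirror the CLASSPATH built by the patched fastqc script.

        real_bin is the directory containing the fastqc script; the jars
        live in the sibling 'lib' directory.
        """
        lib = os.path.join(os.path.dirname(real_bin), 'lib')
        jars = ['sam-1.103.jar', 'jbzip2-0.9.jar', 'cisd-jhdf5.jar']
        parts = ([os.environ.get('CLASSPATH'), lib]
                 + [os.path.join(lib, j) for j in jars])
        # like 'grep {$_}' in the Perl: drop empty/unset entries, then join
        return delimiter.join(p for p in parts if p)
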
diff --git a/var/spack/repos/builtin/packages/fastqc/package.py b/var/spack/repos/builtin/packages/fastqc/package.py
new file mode 100644
index 0000000000..e2a1b54210
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fastqc/package.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from distutils.dir_util import copy_tree, mkpath
+from distutils.file_util import copy_file
+
+
+class Fastqc(Package):
+ """A quality control tool for high throughput sequence data."""
+
+ homepage = "http://www.bioinformatics.babraham.ac.uk/projects/fastqc/"
+ url = "http://www.bioinformatics.babraham.ac.uk/projects/fastqc/fastqc_v0.11.5.zip"
+
+ version('0.11.5', '3524f101c0ab0bae77c7595983170a76')
+
+ depends_on('jdk', type='run')
+ depends_on('perl') # for fastqc "script", any perl will do
+
+ patch('fastqc.patch', level=0)
+
+ def install(self, spec, prefix):
+ mkpath(self.prefix.bin)
+ mkpath(self.prefix.lib)
+ copy_file('fastqc', self.prefix.bin)
+ for j in ['cisd-jhdf5.jar', 'jbzip2-0.9.jar', 'sam-1.103.jar']:
+ copy_file(j, self.prefix.lib)
+ for d in ['Configuration', 'net', 'org', 'Templates', 'uk']:
+ copy_tree(d, join_path(self.prefix.lib, d))
+ chmod = which('chmod')
+ chmod('+x', join_path(self.prefix.bin, 'fastqc'))
+
+ # In theory the 'run' dependency on 'jdk' above should take
+    # care of this; in practice, it does not.
+ def setup_environment(self, spack_env, env):
+ """Add <prefix> to the path; the package has a script at the
+ top level.
+ """
+ env.prepend_path('PATH', join_path(self.spec['jdk'].prefix, 'bin'))
diff --git a/var/spack/repos/builtin/packages/fastx-toolkit/package.py b/var/spack/repos/builtin/packages/fastx-toolkit/package.py
new file mode 100644
index 0000000000..04b4d24b39
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fastx-toolkit/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FastxToolkit(Package):
+ """The FASTX-Toolkit is a collection of command line tools for
+ Short-Reads FASTA/FASTQ files preprocessing."""
+
+ homepage = "http://hannonlab.cshl.edu/fastx_toolkit/"
+ url = "https://github.com/agordon/fastx_toolkit/releases/download/0.0.14/fastx_toolkit-0.0.14.tar.bz2"
+
+ version('0.0.14', 'bf1993c898626bb147de3d6695c20b40')
+
+ depends_on('libgtextutils')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/fenics/hdf5~cxx-detection.patch b/var/spack/repos/builtin/packages/fenics/hdf5~cxx-detection.patch
new file mode 100644
index 0000000000..52e36ab00b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fenics/hdf5~cxx-detection.patch
@@ -0,0 +1,11 @@
+--- a/CMakeLists.txt 2016-08-16 02:30:13.466078087 +0200
++++ b/CMakeLists.txt 2016-08-16 02:30:36.879586772 +0200
+@@ -553,7 +553,7 @@
+ set(ENV{HDF5_ROOT} "$ENV{HDF5_DIR}")
+ endif()
+ set(HDF5_PREFER_PARALLEL TRUE)
+- find_package(HDF5)
++ find_package(HDF5 COMPONENTS C)
+ set_package_properties(HDF5 PROPERTIES TYPE OPTIONAL
+ DESCRIPTION "Hierarchical Data Format 5 (HDF5)"
+ URL "https://www.hdfgroup.org/HDF5")
diff --git a/var/spack/repos/builtin/packages/fenics/package.py b/var/spack/repos/builtin/packages/fenics/package.py
new file mode 100644
index 0000000000..4be94a96b8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fenics/package.py
@@ -0,0 +1,198 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fenics(Package):
+ """FEniCS is organized as a collection of interoperable components
+ that together form the FEniCS Project. These components include
+ the problem-solving environment DOLFIN, the form compiler FFC, the
+ finite element tabulator FIAT, the just-in-time compiler Instant,
+ the code generation interface UFC, the form language UFL and a
+ range of additional components."""
+
+ homepage = "http://fenicsproject.org/"
+ url = "https://bitbucket.org/fenics-project/dolfin/downloads/dolfin-1.6.0.tar.gz"
+
+ base_url = "https://bitbucket.org/fenics-project/{pkg}/downloads/{pkg}-{version}.tar.gz"
+
+ variant('hdf5', default=True, description='Compile with HDF5')
+ variant('parmetis', default=True, description='Compile with ParMETIS')
+ variant('scotch', default=True, description='Compile with Scotch')
+ variant('petsc', default=True, description='Compile with PETSc')
+ variant('slepc', default=True, description='Compile with SLEPc')
+ variant('trilinos', default=True, description='Compile with Trilinos')
+ variant('suite-sparse', default=True,
+ description='Compile with SuiteSparse solvers')
+ variant('vtk', default=False, description='Compile with VTK')
+ variant('qt', default=False, description='Compile with QT')
+ variant('mpi', default=True,
+ description='Enables the distributed memory support')
+ variant('openmp', default=True,
+ description='Enables the shared memory support')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
+
+ # not part of spack list for now
+ # variant('petsc4py', default=True, description='Uses PETSc4py')
+ # variant('slepc4py', default=True, description='Uses SLEPc4py')
+ # variant('pastix', default=True, description='Compile with Pastix')
+
+ patch('petsc-3.7.patch', when='@1.6.1^petsc@3.7:')
+ patch('petsc-version-detection.patch', when='@:1.6.1')
+ patch('hdf5~cxx-detection.patch')
+
+ extends('python')
+
+ depends_on('eigen@3.2.0:', type='build')
+ depends_on('boost+filesystem+program_options+system+iostreams+timer+regex+chrono')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('hdf5', when='+hdf5')
+ depends_on('parmetis@4.0.2:^metis+real64', when='+parmetis')
+ depends_on('scotch~metis', when='+scotch~mpi')
+ depends_on('scotch+mpi~metis', when='+scotch+mpi')
+ depends_on('petsc@3.4:', when='+petsc')
+ depends_on('slepc@3.4:', when='+slepc')
+ depends_on('trilinos', when='+trilinos')
+ depends_on('vtk', when='+vtk')
+ depends_on('suite-sparse', when='+suite-sparse')
+ depends_on('qt', when='+qt')
+
+ depends_on('py-ply', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-sympy', type=('build', 'run'))
+ depends_on('swig@3.0.3:', type=('build', 'run'))
+ depends_on('cmake@2.8.12:', type=('build', 'run'))
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-sphinx@1.0.1:', when='+doc', type='build')
+
+ releases = [
+ {
+ 'version': '2016.1.0',
+ 'md5': '92e8d00f6487a575987201f0b0d19173',
+ 'resources': {
+ 'ffc': '35457ae164e481ba5c9189ebae060a47',
+ 'fiat': 'ac0c49942831ee434301228842bcc280',
+ 'instant': '0e3dbb464c4d90d691f31f0fdd63d4f6',
+ 'ufl': '37433336e5c9b58d1d5ab4acca9104a7',
+ }
+ },
+ {
+ 'version': '1.6.0',
+ 'md5': '35cb4baf7ab4152a40fb7310b34d5800',
+ 'resources': {
+ 'ffc': '358faa3e9da62a1b1a717070217b793e',
+ 'fiat': 'f4509d05c911fd93cea8d288a78a6c6f',
+ 'instant': '5f2522eb032a5bebbad6597b6fe0732a',
+ 'ufl': 'c40c5f04eaa847377ab2323122284016',
+ }
+ },
+ {
+ 'version': '1.5.0',
+ 'md5': '9b589a3534299a5e6d22c13c5eb30bb8',
+ 'resources': {
+ 'ffc': '343f6d30e7e77d329a400fd8e73e0b63',
+ 'fiat': 'da3fa4dd8177bb251e7f68ec9c7cf6c5',
+ 'instant': 'b744023ded27ee9df4a8d8c6698c0d58',
+ 'ufl': '130d7829cf5a4bd5b52bf6d0955116fd',
+ }
+ },
+ ]
+
+ for release in releases:
+ version(release['version'], release['md5'], url=base_url.format(
+ pkg='dolfin', version=release['version']))
+ for name, md5 in release['resources'].items():
+ resource(name=name,
+ url=base_url.format(pkg=name, **release),
+ md5=md5,
+ destination='depends',
+ when='@{version}'.format(**release),
+ placement=name)
+
+ def cmake_is_on(self, option):
+ return 'ON' if option in self.spec else 'OFF'
+
+ def install(self, spec, prefix):
+ for package in ['ufl', 'ffc', 'fiat', 'instant']:
+ with working_dir(join_path('depends', package)):
+ setup_py('install', '--prefix=%s' % prefix)
+
+ cmake_args = [
+ '-DCMAKE_BUILD_TYPE:STRING={0}'.format(
+ 'Debug' if '+debug' in spec else 'RelWithDebInfo'),
+ '-DBUILD_SHARED_LIBS:BOOL={0}'.format(
+ self.cmake_is_on('+shared')),
+ '-DDOLFIN_SKIP_BUILD_TESTS:BOOL=ON',
+ '-DDOLFIN_ENABLE_OPENMP:BOOL={0}'.format(
+ self.cmake_is_on('+openmp')),
+ '-DDOLFIN_ENABLE_CHOLMOD:BOOL={0}'.format(
+ self.cmake_is_on('suite-sparse')),
+ '-DDOLFIN_ENABLE_HDF5:BOOL={0}'.format(
+ self.cmake_is_on('hdf5')),
+ '-DDOLFIN_ENABLE_MPI:BOOL={0}'.format(
+ self.cmake_is_on('mpi')),
+ '-DDOLFIN_ENABLE_PARMETIS:BOOL={0}'.format(
+ self.cmake_is_on('parmetis')),
+ '-DDOLFIN_ENABLE_PASTIX:BOOL={0}'.format(
+ self.cmake_is_on('pastix')),
+ '-DDOLFIN_ENABLE_PETSC:BOOL={0}'.format(
+ self.cmake_is_on('petsc')),
+ '-DDOLFIN_ENABLE_PETSC4PY:BOOL={0}'.format(
+ self.cmake_is_on('py-petsc4py')),
+ '-DDOLFIN_ENABLE_PYTHON:BOOL={0}'.format(
+ self.cmake_is_on('python')),
+ '-DDOLFIN_ENABLE_QT:BOOL={0}'.format(
+ self.cmake_is_on('qt')),
+ '-DDOLFIN_ENABLE_SCOTCH:BOOL={0}'.format(
+ self.cmake_is_on('scotch')),
+ '-DDOLFIN_ENABLE_SLEPC:BOOL={0}'.format(
+ self.cmake_is_on('slepc')),
+ '-DDOLFIN_ENABLE_SLEPC4PY:BOOL={0}'.format(
+ self.cmake_is_on('py-slepc4py')),
+ '-DDOLFIN_ENABLE_SPHINX:BOOL={0}'.format(
+ self.cmake_is_on('py-sphinx')),
+ '-DDOLFIN_ENABLE_TRILINOS:BOOL={0}'.format(
+ self.cmake_is_on('trilinos')),
+ '-DDOLFIN_ENABLE_UMFPACK:BOOL={0}'.format(
+ self.cmake_is_on('suite-sparse')),
+ '-DDOLFIN_ENABLE_VTK:BOOL={0}'.format(
+ self.cmake_is_on('vtk')),
+ '-DDOLFIN_ENABLE_ZLIB:BOOL={0}'.format(
+ self.cmake_is_on('zlib')),
+ ]
+
+ cmake_args.extend(std_cmake_args)
+
+ with working_dir('build', create=True):
+ cmake('..', *cmake_args)
+
+ make()
+ make('install')
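
The releases table and loop at the bottom of the FEniCS class above drive all version() and resource() registrations from one data structure: each component tarball comes from the same Bitbucket URL template and is unpacked under depends/<name> for the matching DOLFIN version. A small standalone sketch of how the template expands (not package code; note that str.format simply ignores the extra keys passed in via **release):

    base_url = "https://bitbucket.org/fenics-project/{pkg}/downloads/{pkg}-{version}.tar.gz"
    release = {'version': '2016.1.0',
               'md5': '92e8d00f6487a575987201f0b0d19173',
               'resources': {'ffc': '35457ae164e481ba5c9189ebae060a47'}}

    # main tarball for DOLFIN itself
    print(base_url.format(pkg='dolfin', version=release['version']))
    # -> https://bitbucket.org/fenics-project/dolfin/downloads/dolfin-2016.1.0.tar.gz

    # one resource per component, gated on the matching version
    for name in release['resources']:
        print(base_url.format(pkg=name, **release))
    # -> https://bitbucket.org/fenics-project/ffc/downloads/ffc-2016.1.0.tar.gz
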
diff --git a/var/spack/repos/builtin/packages/fenics/petsc-3.7.patch b/var/spack/repos/builtin/packages/fenics/petsc-3.7.patch
new file mode 100644
index 0000000000..c1ba5c4da6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fenics/petsc-3.7.patch
@@ -0,0 +1,394 @@
+diff -Naur dolfin-1.6.0/dolfin/common/SubSystemsManager.cpp dolfin-1.6.0.new/dolfin/common/SubSystemsManager.cpp
+--- dolfin-1.6.0/dolfin/common/SubSystemsManager.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/common/SubSystemsManager.cpp 2016-06-26 23:42:56.391929550 +0200
+@@ -179,7 +179,7 @@
+ PetscInitialized(&is_initialized);
+ if (is_initialized)
+ {
+- PetscOptionsInsert(&argc, &argv, PETSC_NULL);
++ PetscOptionsInsert(NULL, &argc, &argv, PETSC_NULL);
+ }
+ else
+ {
+@@ -187,12 +187,12 @@
+ PetscInitializeNoArguments();
+
+ // Set options to avoid common failures with some 3rd party solvers
+- PetscOptionsSetValue("-mat_mumps_icntl_7", "0");
+- PetscOptionsSetValue("-mat_superlu_dist_colperm", "MMD_AT_PLUS_A");
++ PetscOptionsSetValue(NULL, "-mat_mumps_icntl_7", "0");
++ PetscOptionsSetValue(NULL, "-mat_superlu_dist_colperm", "MMD_AT_PLUS_A");
+
+ // Pass command line arguments to PETSc (will overwrite any
+ // default above)
+- PetscOptionsInsert(&argc, &argv, PETSC_NULL);
++ PetscOptionsInsert(NULL, &argc, &argv, PETSC_NULL);
+ }
+
+ // Set PETSc
+diff -Naur dolfin-1.6.0/dolfin/la/PETScKrylovSolver.cpp dolfin-1.6.0.new/dolfin/la/PETScKrylovSolver.cpp
+--- dolfin-1.6.0/dolfin/la/PETScKrylovSolver.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/la/PETScKrylovSolver.cpp 2016-06-26 23:33:02.418351380 +0200
+@@ -564,6 +564,11 @@
+ return solve(x, b);
+ }
+ //-----------------------------------------------------------------------------
++PetscErrorCode PETScKrylovSolver::ksp_monitor_norm(KSP ksp, PetscInt n, PetscReal rnorm, void *vf)
++{
++ KSPMonitorTrueResidualNorm(ksp, n, rnorm, static_cast<PetscViewerAndFormat *>(vf));
++}
++//-----------------------------------------------------------------------------
+ void PETScKrylovSolver::set_petsc_ksp_options()
+ {
+ PetscErrorCode ierr;
+@@ -585,7 +590,8 @@
+ const bool monitor_convergence = parameters["monitor_convergence"];
+ if (monitor_convergence)
+ {
+- ierr = KSPMonitorSet(_ksp, KSPMonitorTrueResidualNorm, 0, 0);
++ PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &_vf);
++ ierr = KSPMonitorSet(_ksp, ksp_monitor_norm, _vf, 0);
+ if (ierr != 0) petsc_error(ierr, __FILE__, "KSPMonitorSet");
+ }
+
+diff -Naur dolfin-1.6.0/dolfin/la/PETScKrylovSolver.h dolfin-1.6.0.new/dolfin/la/PETScKrylovSolver.h
+--- dolfin-1.6.0/dolfin/la/PETScKrylovSolver.h 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/la/PETScKrylovSolver.h 2016-06-26 23:19:40.767042975 +0200
+@@ -179,6 +179,12 @@
+ // PETSc solver pointer
+ KSP _ksp;
+
++ // viewer for monitoring
++ PetscViewerAndFormat* _vf;
++
++ // monitoring function
++ static PetscErrorCode ksp_monitor_norm(KSP ksp, PetscInt n, PetscReal rnorm, void *ctx);
++
+ // DOLFIN-defined PETScUserPreconditioner
+ PETScUserPreconditioner* pc_dolfin;
+
+diff -Naur dolfin-1.6.0/dolfin/la/PETScLUSolver.cpp dolfin-1.6.0.new/dolfin/la/PETScLUSolver.cpp
+--- dolfin-1.6.0/dolfin/la/PETScLUSolver.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/la/PETScLUSolver.cpp 2016-06-26 23:13:36.903732301 +0200
+@@ -246,14 +246,14 @@
+ if (parameters["num_threads"].is_set())
+ {
+ // Use number of threads specified for LU solver
+- ierr = PetscOptionsSetValue("-mat_pastix_threadnbr",
++ ierr = PetscOptionsSetValue(NULL, "-mat_pastix_threadnbr",
+ parameters["num_threads"].value_str().c_str());
+ if (ierr != 0) petsc_error(ierr, __FILE__, "PetscOptionsSetValue");
+ }
+ else
+ {
+ // Use global number of threads
+- ierr = PetscOptionsSetValue("-mat_pastix_threadnbr",
++ ierr = PetscOptionsSetValue(NULL, "-mat_pastix_threadnbr",
+ dolfin::parameters["num_threads"].value_str().c_str());
+ if (ierr != 0) petsc_error(ierr, __FILE__, "PetscOptionsSetValue");
+ }
+diff -Naur dolfin-1.6.0/dolfin/la/PETScOptions.cpp dolfin-1.6.0.new/dolfin/la/PETScOptions.cpp
+--- dolfin-1.6.0/dolfin/la/PETScOptions.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/la/PETScOptions.cpp 2016-06-26 23:37:58.613121118 +0200
+@@ -54,7 +54,7 @@
+ {
+ SubSystemsManager::init_petsc();
+ PetscErrorCode ierr;
+- ierr = PetscOptionsClearValue(option.c_str());
++ ierr = PetscOptionsClearValue(NULL, option.c_str());
+ if (ierr != 0)
+ {
+ dolfin_error("PETScOptions.cpp",
+diff -Naur dolfin-1.6.0/dolfin/la/PETScOptions.h dolfin-1.6.0.new/dolfin/la/PETScOptions.h
+--- dolfin-1.6.0/dolfin/la/PETScOptions.h 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/la/PETScOptions.h 2016-06-26 23:37:54.352023709 +0200
+@@ -65,7 +65,7 @@
+
+ PetscErrorCode ierr;
+ std::string _option = "-" + option;
+- ierr = PetscOptionsSetValue(_option.c_str(),
++ ierr = PetscOptionsSetValue(NULL, _option.c_str(),
+ boost::lexical_cast<std::string>(value).c_str());
+ if (ierr != 0)
+ {
+diff -Naur dolfin-1.6.0/dolfin/la/SLEPcEigenSolver.cpp dolfin-1.6.0.new/dolfin/la/SLEPcEigenSolver.cpp
+--- dolfin-1.6.0/dolfin/la/SLEPcEigenSolver.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/la/SLEPcEigenSolver.cpp 2016-06-26 23:35:25.930631132 +0200
+@@ -99,6 +99,19 @@
+ solve(_matA->size(0));
+ }
+ //-----------------------------------------------------------------------------
++PetscErrorCode SLEPcEigenSolver::eps_monitor(EPS eps, PetscInt its, PetscInt nconv, PetscScalar* eigr,
++ PetscScalar* eigi, PetscReal* errest,
++ PetscInt nest, void *vf)
++{
++ EPSMonitorAll(eps, its, nconv, eigr, eigi, errest, nest,
++ static_cast<PetscViewerAndFormat *>(vf));
++}
++//-----------------------------------------------------------------------------
++PetscErrorCode SLEPcEigenSolver::ksp_monitor(KSP ksp, PetscInt n, PetscReal rnorm, void *vf)
++{
++ KSPMonitorDefault(ksp, n, rnorm, static_cast<PetscViewerAndFormat *>(vf));
++}
++//-----------------------------------------------------------------------------
+ void SLEPcEigenSolver::solve(std::size_t n)
+ {
+ dolfin_assert(_matA);
+@@ -139,10 +152,11 @@
+ {
+ KSP ksp;
+ ST st;
+- EPSMonitorSet(_eps, EPSMonitorAll, NULL, NULL);
++ PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &_vf);
++ EPSMonitorSet(_eps, eps_monitor, _vf, NULL);
+ EPSGetST(_eps, &st);
+ STGetKSP(st, &ksp);
+- KSPMonitorSet(ksp, KSPMonitorDefault, NULL, NULL);
++ KSPMonitorSet(ksp, ksp_monitor, _vf, NULL);
+ EPSView(_eps, PETSC_VIEWER_STDOUT_SELF);
+ }
+
+diff -Naur dolfin-1.6.0/dolfin/la/SLEPcEigenSolver.h dolfin-1.6.0.new/dolfin/la/SLEPcEigenSolver.h
+--- dolfin-1.6.0/dolfin/la/SLEPcEigenSolver.h 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/la/SLEPcEigenSolver.h 2016-06-26 23:36:08.784610612 +0200
+@@ -231,6 +231,11 @@
+ // SLEPc solver pointer
+ EPS _eps;
+
++ PetscViewerAndFormat* _vf;
++ static PetscErrorCode eps_monitor(EPS eps, int its, int nconv, PetscScalar *eigr,
++ PetscScalar *eigi, PetscReal* errest,
++ int nest, void *mctx);
++ static PetscErrorCode ksp_monitor(KSP ksp, PetscInt n, PetscReal rnorm, void *vf);
+ };
+
+ }
+diff -Naur dolfin-1.6.0/dolfin/nls/PETScSNESSolver.cpp dolfin-1.6.0.new/dolfin/nls/PETScSNESSolver.cpp
+--- dolfin-1.6.0/dolfin/nls/PETScSNESSolver.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/nls/PETScSNESSolver.cpp 2016-06-26 23:19:40.166029256 +0200
+@@ -195,6 +195,15 @@
+ return this->solve(nonlinear_problem, x);
+ }
+ //-----------------------------------------------------------------------------
++PetscErrorCode
++PETScSNESSolver::snes_monitor(SNES snes, PetscInt its,
++ PetscReal fgnorm, void *vf)
++{
++ PetscViewerAndFormat * _vf = static_cast<PetscViewerAndFormat *>(vf);
++ SNESMonitorDefault(snes, its, fgnorm, _vf);
++ return(0);
++}
++//-----------------------------------------------------------------------------
+ void
+ PETScSNESSolver::init(NonlinearProblem& nonlinear_problem,
+ GenericVector& x)
+@@ -237,9 +246,11 @@
+ }
+
+ // Set some options from the parameters
+- if (report)
+- SNESMonitorSet(_snes, SNESMonitorDefault, PETSC_NULL, PETSC_NULL);
+-
++ if (report) {
++ PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &_snes_ctx.vf);
++ SNESMonitorSet(_snes, PETScSNESSolver::snes_monitor, _snes_ctx.vf, PETSC_NULL);
++ }
++
+ // Set the bounds, if any
+ set_bounds(x);
+
+@@ -293,8 +304,8 @@
+ SNESGetLineSearch(_snes, &linesearch);
+ #endif
+
+- if (report)
+- SNESLineSearchSetMonitor(linesearch, PETSC_TRUE);
++ // if (report)
++ // SNESLineSearchSetMonitor(linesearch, PETSC_TRUE);
+ const std::string line_search_type = std::string(parameters["line_search"]);
+ SNESLineSearchSetType(linesearch, line_search_type.c_str());
+
+@@ -466,6 +477,24 @@
+ }
+ #endif
+ //-----------------------------------------------------------------------------
++PetscErrorCode
++PETScSNESSolver::ksp_monitor(KSP ksp, PetscInt n,
++ PetscReal rnorm, void *vf)
++{
++ PetscViewerAndFormat * _vf = static_cast<PetscViewerAndFormat *>(vf);
++ KSPMonitorDefault(ksp, n, rnorm, _vf);
++ return(0);
++}
++//-----------------------------------------------------------------------------
++PetscErrorCode
++PETScSNESSolver::ksp_monitor_norm(KSP ksp, PetscInt n,
++ PetscReal rnorm, void *vf)
++{
++ PetscViewerAndFormat * _vf = static_cast<PetscViewerAndFormat *>(vf);
++ KSPMonitorTrueResidualNorm(ksp, n, rnorm, _vf);
++ return(0);
++}
++//-----------------------------------------------------------------------------
+ void PETScSNESSolver::set_linear_solver_parameters()
+ {
+ KSP ksp;
+@@ -482,7 +511,7 @@
+ PetscObjectGetComm((PetscObject)_snes, &comm);
+
+ if (parameters["report"])
+- KSPMonitorSet(ksp, KSPMonitorDefault, PETSC_NULL, PETSC_NULL);
++ KSPMonitorSet(ksp, PETScSNESSolver::ksp_monitor, _snes_ctx.vf, PETSC_NULL);
+
+ const std::string linear_solver = parameters["linear_solver"];
+ const std::string preconditioner = parameters["preconditioner"];
+@@ -518,7 +547,7 @@
+ KSPSetInitialGuessNonzero(ksp, PETSC_FALSE);
+
+ if (krylov_parameters["monitor_convergence"])
+- KSPMonitorSet(ksp, KSPMonitorTrueResidualNorm, 0, 0);
++ KSPMonitorSet(ksp, PETScSNESSolver::ksp_monitor_norm, _snes_ctx.vf, 0);
+
+ // Set tolerances
+ const int max_iters = krylov_parameters["maximum_iterations"];
+diff -Naur dolfin-1.6.0/dolfin/nls/PETScSNESSolver.h dolfin-1.6.0.new/dolfin/nls/PETScSNESSolver.h
+--- dolfin-1.6.0/dolfin/nls/PETScSNESSolver.h 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/nls/PETScSNESSolver.h 2016-06-26 22:31:21.554129282 +0200
+@@ -24,6 +24,7 @@
+
+ #include <map>
+ #include <petscsnes.h>
++#include <petscviewer.h>
+ #include <memory>
+ #include <dolfin/nls/NewtonSolver.h>
+ #include <dolfin/parameter/Parameters.h>
+@@ -124,8 +125,15 @@
+ Vec f_tmp;
+ const PETScVector* xl;
+ const PETScVector* xu;
++ PetscViewerAndFormat* vf;
+ };
+
++
++ // monitoring functions
++ static PetscErrorCode snes_monitor(SNES snes, PetscInt its, PetscReal fgnorm, void* ctx);
++ static PetscErrorCode ksp_monitor(KSP ksp, PetscInt n, PetscReal rnorm, void *ctx);
++ static PetscErrorCode ksp_monitor_norm(KSP ksp, PetscInt n, PetscReal rnorm, void *ctx);
++
+ // PETSc solver pointer
+ SNES _snes;
+
+diff -Naur dolfin-1.6.0/dolfin/nls/PETScTAOSolver.cpp dolfin-1.6.0.new/dolfin/nls/PETScTAOSolver.cpp
+--- dolfin-1.6.0/dolfin/nls/PETScTAOSolver.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/nls/PETScTAOSolver.cpp 2016-06-27 19:14:47.367885081 +0200
+@@ -186,6 +186,15 @@
+ init(optimisation_problem, x.down_cast<PETScVector>(), lb, ub);
+ }
+ //-----------------------------------------------------------------------------
++PetscErrorCode
++PETScTAOSolver::ksp_monitor_norm(KSP ksp, PetscInt n,
++ PetscReal rnorm, void * vf)
++{
++ PetscViewerAndFormat * _vf = static_cast<PetscViewerAndFormat *>(vf);
++ KSPMonitorTrueResidualNorm(ksp, n, rnorm, _vf);
++ return(0);
++}
++//-----------------------------------------------------------------------------
+ void PETScTAOSolver::init(OptimisationProblem& optimisation_problem,
+ PETScVector& x,
+ const PETScVector& lb,
+@@ -365,9 +374,7 @@
+ set_tao(parameters["method"]);
+
+ // Set tolerances
+- TaoSetTolerances(_tao, parameters["function_absolute_tol"],
+- parameters["function_relative_tol"],
+- parameters["gradient_absolute_tol"],
++ TaoSetTolerances(_tao, parameters["gradient_absolute_tol"],
+ parameters["gradient_relative_tol"],
+ parameters["gradient_t_tol"]);
+
+@@ -490,8 +497,10 @@
+ KSPSetInitialGuessNonzero(ksp, PETSC_FALSE);
+
+ // KSP monitor
+- if (krylov_parameters["monitor_convergence"])
+- KSPMonitorSet(ksp, KSPMonitorTrueResidualNorm, 0, 0);
++ if (krylov_parameters["monitor_convergence"]) {
++ PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &vf);
++ KSPMonitorSet(ksp, ksp_monitor_norm, vf, 0);
++ }
+
+ // Get integer tolerances (to take care of casting to PetscInt)
+ const int max_iter = krylov_parameters["maximum_iterations"];
+diff -Naur dolfin-1.6.0/dolfin/nls/PETScTAOSolver.h dolfin-1.6.0.new/dolfin/nls/PETScTAOSolver.h
+--- dolfin-1.6.0/dolfin/nls/PETScTAOSolver.h 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/nls/PETScTAOSolver.h 2016-06-27 19:08:00.634361160 +0200
+@@ -146,6 +146,10 @@
+ // TAO pointer
+ Tao _tao;
+
++ // monitoring functions
++ PetscViewerAndFormat* vf;
++ static PetscErrorCode ksp_monitor_norm(KSP ksp, PetscInt n, PetscReal rnorm, void *ctx);
++
+ // Update parameters when tao/ksp/pc_types are explictly given
+ void update_parameters(const std::string tao_type,
+ const std::string ksp_type,
+diff -Naur dolfin-1.6.0/dolfin/nls/TAOLinearBoundSolver.cpp dolfin-1.6.0.new/dolfin/nls/TAOLinearBoundSolver.cpp
+--- dolfin-1.6.0/dolfin/nls/TAOLinearBoundSolver.cpp 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/nls/TAOLinearBoundSolver.cpp 2016-06-27 19:17:10.390216576 +0200
+@@ -313,9 +313,7 @@
+ dolfin_assert(_tao);
+
+ // Set tolerances
+- TaoSetTolerances(_tao, parameters["function_absolute_tol"],
+- parameters["function_relative_tol"],
+- parameters["gradient_absolute_tol"],
++ TaoSetTolerances(_tao, parameters["gradient_absolute_tol"],
+ parameters["gradient_relative_tol"],
+ parameters["gradient_t_tol"]);
+
+@@ -340,6 +338,15 @@
+ set_solver(method);
+ }
+ //-----------------------------------------------------------------------------
++PetscErrorCode
++TAOLinearBoundSolver::ksp_monitor_norm(KSP ksp, PetscInt n,
++ PetscReal rnorm, void * vf)
++{
++ PetscViewerAndFormat * _vf = static_cast<PetscViewerAndFormat *>(vf);
++ KSPMonitorTrueResidualNorm(ksp, n, rnorm, _vf);
++ return(0);
++}
++//-----------------------------------------------------------------------------
+ void TAOLinearBoundSolver::set_ksp_options()
+ {
+ dolfin_assert(_tao);
+@@ -360,8 +367,10 @@
+ else
+ KSPSetInitialGuessNonzero(ksp, PETSC_FALSE);
+
+- if (krylov_parameters["monitor_convergence"])
+- KSPMonitorSet(ksp, KSPMonitorTrueResidualNorm, 0, 0);
++ if (krylov_parameters["monitor_convergence"]) {
++ PetscViewerAndFormatCreate(PETSC_VIEWER_STDOUT_WORLD, PETSC_VIEWER_DEFAULT, &vf);
++ KSPMonitorSet(ksp, ksp_monitor_norm, vf, 0);
++ }
+
+ // Set tolerances
+ const int max_ksp_it = krylov_parameters["maximum_iterations"];
+diff -Naur dolfin-1.6.0/dolfin/nls/TAOLinearBoundSolver.h dolfin-1.6.0.new/dolfin/nls/TAOLinearBoundSolver.h
+--- dolfin-1.6.0/dolfin/nls/TAOLinearBoundSolver.h 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0.new/dolfin/nls/TAOLinearBoundSolver.h 2016-06-27 19:08:09.157561005 +0200
+@@ -174,6 +174,10 @@
+ // Tao solver pointer
+ Tao _tao;
+
++ // monitoring functions
++ PetscViewerAndFormat* vf;
++ static PetscErrorCode ksp_monitor_norm(KSP ksp, PetscInt n, PetscReal rnorm, void *ctx);
++
+ // Petsc preconditioner
+ std::shared_ptr<PETScPreconditioner> preconditioner;
+
diff --git a/var/spack/repos/builtin/packages/fenics/petsc-version-detection.patch b/var/spack/repos/builtin/packages/fenics/petsc-version-detection.patch
new file mode 100644
index 0000000000..1429cd7bc6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fenics/petsc-version-detection.patch
@@ -0,0 +1,39 @@
+--- dolfin-1.6.0.orig/cmake/modules/FindPETSc.cmake 2015-07-28 17:05:55.000000000 +0200
++++ dolfin-1.6.0/cmake/modules/FindPETSc.cmake 2016-06-27 17:16:02.484402705 +0200
+@@ -207,13 +207,7 @@
+
+ endif()
+
+-# Build PETSc test program
+-if (DOLFIN_SKIP_BUILD_TESTS)
+- set(PETSC_TEST_RUNS TRUE)
+- set(PETSC_VERSION "UNKNOWN")
+- set(PETSC_VERSION_OK TRUE)
+-elseif (FOUND_PETSC_CONF)
+-
++if (FOUND_PETSC_CONF)
+ # Set flags for building test program
+ set(CMAKE_REQUIRED_INCLUDES ${PETSC_INCLUDE_DIRS})
+ set(CMAKE_REQUIRED_LIBRARIES ${PETSC_LIBRARIES})
+@@ -271,7 +265,10 @@
+ set(PETSC_VERSION_OK TRUE)
+ endif()
+ mark_as_advanced(PETSC_VERSION_OK)
++endif()
+
++ # Build PETSc test program
++if (NOT DOLFIN_SKIP_BUILD_TESTS AND FOUND_PETSC_CONF)
+ # Run PETSc test program
+ set(PETSC_TEST_LIB_CPP
+ "${CMAKE_CURRENT_BINARY_DIR}/CMakeFiles/petsc_test_lib.cpp")
+@@ -359,7 +356,9 @@
+ else()
+ message(STATUS "PETSc configured without Cusp support")
+ endif()
+-
++else()
++ set(PETSC_TEST_RUNS TRUE)
++
+ endif()
+
+ # Check sizeof(PetscInt)
diff --git a/var/spack/repos/builtin/packages/ferret/package.py b/var/spack/repos/builtin/packages/ferret/package.py
new file mode 100644
index 0000000000..15ddfcee16
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ferret/package.py
@@ -0,0 +1,103 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+import os
+
+
+class Ferret(Package):
+ """Ferret is an interactive computer visualization and analysis environment
+ designed to meet the needs of oceanographers and meteorologists
+ analyzing large and complex gridded data sets."""
+ homepage = "http://ferret.noaa.gov/Ferret/"
+ url = "ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.tar.gz"
+
+ version('6.96', '51722027c864369f41bab5751dfff8cc',
+ url="ftp://ftp.pmel.noaa.gov/ferret/pub/source/fer_source.tar.gz")
+
+ depends_on("hdf5~mpi~fortran")
+ depends_on("netcdf~mpi")
+ depends_on("netcdf-fortran")
+ depends_on("readline")
+ depends_on("zlib")
+
+ def patch(self):
+ hdf5_prefix = self.spec['hdf5'].prefix
+ netcdff_prefix = self.spec['netcdf-fortran'].prefix
+ readline_prefix = self.spec['readline'].prefix
+ libz_prefix = self.spec['zlib'].prefix
+
+ filter_file(r'^BUILDTYPE.+',
+ 'BUILDTYPE = x86_64-linux',
+ 'FERRET/site_specific.mk')
+ filter_file(r'^INSTALL_FER_DIR.+',
+ 'INSTALL_FER_DIR = %s' % self.spec.prefix,
+ 'FERRET/site_specific.mk')
+ filter_file(r'^HDF5_DIR.+',
+ 'HDF5_DIR = %s' % hdf5_prefix,
+ 'FERRET/site_specific.mk')
+ filter_file(r'^NETCDF4_DIR.+',
+ 'NETCDF4_DIR = %s' % netcdff_prefix,
+ 'FERRET/site_specific.mk')
+ filter_file(r'^READLINE_DIR.+',
+ 'READLINE_DIR = %s' % readline_prefix,
+ 'FERRET/site_specific.mk')
+ filter_file(r'^LIBZ_DIR.+',
+ 'LIBZ_DIR = %s' % libz_prefix,
+ 'FERRET/site_specific.mk')
+ filter_file(r'^JAVA_HOME.+',
+ ' ',
+ 'FERRET/site_specific.mk')
+ filter_file(r'-lm',
+ '-lgfortran -lm',
+ 'FERRET/platform_specific.mk.x86_64-linux')
+
+ def install(self, spec, prefix):
+ hdf5_prefix = spec['hdf5'].prefix
+ netcdff_prefix = spec['netcdf-fortran'].prefix
+ netcdf_prefix = spec['netcdf'].prefix
+ libz_prefix = spec['zlib'].prefix
+ ln = which('ln')
+ ln('-sf',
+ hdf5_prefix + '/lib',
+ hdf5_prefix + '/lib64')
+ ln('-sf',
+ netcdff_prefix + '/lib',
+ netcdff_prefix + '/lib64')
+ ln('-sf',
+ netcdf_prefix + '/lib/libnetcdf.a',
+ netcdff_prefix + '/lib/libnetcdf.a')
+ ln('-sf',
+ netcdf_prefix + '/lib/libnetcdf.la',
+ netcdff_prefix + '/lib/libnetcdf.la')
+ ln('-sf',
+ libz_prefix + '/lib',
+ libz_prefix + '/lib64')
+ os.environ['LDFLAGS'] = '-lquadmath'
+ with working_dir('FERRET', create=False):
+ os.environ['LD_X11'] = '-L/usr/lib/X11 -lX11'
+ os.environ['HOSTTYPE'] = 'x86_64-linux'
+ make(parallel=False)
+ make("install")
diff --git a/var/spack/repos/builtin/packages/fftw/package.py b/var/spack/repos/builtin/packages/fftw/package.py
index 434aeea616..838f6ff744 100644
--- a/var/spack/repos/builtin/packages/fftw/package.py
+++ b/var/spack/repos/builtin/packages/fftw/package.py
@@ -22,63 +22,94 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
-
from spack import *
class Fftw(Package):
- """
- FFTW is a C subroutine library for computing the discrete Fourier transform (DFT) in one or more dimensions, of
- arbitrary input size, and of both real and complex data (as well as of even/odd data, i.e. the discrete cosine/sine
- transforms or DCT/DST). We believe that FFTW, which is free software, should become the FFT library of choice for
- most applications.
- """
+ """FFTW is a C subroutine library for computing the discrete Fourier
+ transform (DFT) in one or more dimensions, of arbitrary input
+ size, and of both real and complex data (as well as of even/odd
+ data, i.e. the discrete cosine/sine transforms or DCT/DST). We
+ believe that FFTW, which is free software, should become the FFT
+ library of choice for most applications."""
+
homepage = "http://www.fftw.org"
url = "http://www.fftw.org/fftw-3.3.4.tar.gz"
+ version('3.3.5', '6cc08a3b9c7ee06fdd5b9eb02e06f569')
version('3.3.4', '2edab8c06b24feeb3b82bbb3ebf3e7b3')
- variant('float', default=True, description='Produces a single precision version of the library')
- variant('long_double', default=True, description='Produces a long double precision version of the library')
- variant('quad', default=False, description='Produces a quad precision version of the library (works only with GCC and libquadmath)')
+ patch('pfft-3.3.5.patch', when="@3.3.5+pfft_patches", level=0)
+ patch('pfft-3.3.4.patch', when="@3.3.4+pfft_patches", level=0)
+
+ variant(
+ 'float', default=True,
+ description='Produces a single precision version of the library')
+ variant(
+ 'long_double', default=True,
+ description='Produces a long double precision version of the library')
+ variant(
+ 'quad', default=False,
+ description='Produces a quad precision version of the library '
+ '(works only with GCC and libquadmath)')
variant('openmp', default=False, description="Enable OpenMP support.")
- variant('mpi', default=False, description='Activate MPI support')
+ variant('mpi', default=True, description='Activate MPI support')
+ variant(
+ 'pfft_patches', default=False,
+ description='Add extra transpose functions for PFFT compatibility')
depends_on('mpi', when='+mpi')
+ depends_on('automake', type='build', when='+pfft_patches')
+ depends_on('autoconf', type='build', when='+pfft_patches')
- # TODO : add support for architecture specific optimizations as soon as targets are supported
+ # TODO : add support for architecture specific optimizations as soon as
+ # targets are supported
def install(self, spec, prefix):
- options = ['--prefix=%s' % prefix,
- '--enable-shared',
- '--enable-threads']
- # Add support for OpenMP
+ options = [
+ '--prefix={0}'.format(prefix),
+ '--enable-shared',
+ '--enable-threads'
+ ]
+
+ # Add support for OpenMP
if '+openmp' in spec:
# Note: Apple's Clang does not support OpenMP.
if spec.satisfies('%clang'):
- ver = str(self.compiler.version)
- if ver.endswith('-apple'):
- raise InstallError("Apple's clang does not support OpenMP")
+ ver = str(self.compiler.version)
+ if ver.endswith('-apple'):
+ raise InstallError("Apple's clang does not support OpenMP")
options.append('--enable-openmp')
if not self.compiler.f77 or not self.compiler.fc:
options.append("--disable-fortran")
if '+mpi' in spec:
options.append('--enable-mpi')
+ if '+pfft_patches' in spec:
+ autoreconf = which('autoreconf')
+ autoreconf('-ifv')
+
configure(*options)
make()
+ if self.run_tests:
+ make("check")
make("install")
if '+float' in spec:
configure('--enable-float', *options)
make()
+ if self.run_tests:
+ make("check")
make("install")
if '+long_double' in spec:
configure('--enable-long-double', *options)
make()
+ if self.run_tests:
+ make("check")
make("install")
if '+quad' in spec:
configure('--enable-quad-precision', *options)
make()
+ if self.run_tests:
+ make("check")
make("install")
diff --git a/var/spack/repos/builtin/packages/fftw/pfft-3.3.4.patch b/var/spack/repos/builtin/packages/fftw/pfft-3.3.4.patch
new file mode 100644
index 0000000000..4740a60ae4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fftw/pfft-3.3.4.patch
@@ -0,0 +1,865 @@
+--- mpi/conf.c 2014-03-04 19:41:03.000000000 +0100
++++ mpi/conf.c 2015-09-05 05:53:19.085516467 +0200
+@@ -29,6 +29,8 @@ static const solvtab s =
+ SOLVTAB(XM(transpose_pairwise_register)),
+ SOLVTAB(XM(transpose_alltoall_register)),
+ SOLVTAB(XM(transpose_recurse_register)),
++ SOLVTAB(XM(transpose_pairwise_transposed_register)),
++ SOLVTAB(XM(transpose_alltoall_transposed_register)),
+ SOLVTAB(XM(dft_rank_geq2_register)),
+ SOLVTAB(XM(dft_rank_geq2_transposed_register)),
+ SOLVTAB(XM(dft_serial_register)),
+
+--- mpi/Makefile.am 2013-03-18 13:10:45.000000000 +0100
++++ mpi/Makefile.am 2015-09-05 05:53:19.084516437 +0200
+@@ -16,6 +16,7 @@ BUILT_SOURCES = fftw3-mpi.f03.in fftw3-m
+ CLEANFILES = fftw3-mpi.f03 fftw3l-mpi.f03
+
+ TRANSPOSE_SRC = transpose-alltoall.c transpose-pairwise.c transpose-recurse.c transpose-problem.c transpose-solve.c mpi-transpose.h
++TRANSPOSE_SRC += transpose-alltoall-transposed.c transpose-pairwise-transposed.c
+ DFT_SRC = dft-serial.c dft-rank-geq2.c dft-rank-geq2-transposed.c dft-rank1.c dft-rank1-bigvec.c dft-problem.c dft-solve.c mpi-dft.h
+ RDFT_SRC = rdft-serial.c rdft-rank-geq2.c rdft-rank-geq2-transposed.c rdft-rank1-bigvec.c rdft-problem.c rdft-solve.c mpi-rdft.h
+ RDFT2_SRC = rdft2-serial.c rdft2-rank-geq2.c rdft2-rank-geq2-transposed.c rdft2-problem.c rdft2-solve.c mpi-rdft2.h
+
+--- mpi/mpi-transpose.h 2014-03-04 19:41:03.000000000 +0100
++++ mpi/mpi-transpose.h 2015-09-05 05:53:19.085516467 +0200
+@@ -59,3 +59,5 @@ int XM(mkplans_posttranspose)(const prob
+ void XM(transpose_pairwise_register)(planner *p);
+ void XM(transpose_alltoall_register)(planner *p);
+ void XM(transpose_recurse_register)(planner *p);
++void XM(transpose_pairwise_transposed_register)(planner *p);
++void XM(transpose_alltoall_transposed_register)(planner *p);
+
+--- mpi/transpose-alltoall-transposed.c 1970-01-01 01:00:00.000000000 +0100
++++ mpi/transpose-alltoall-transposed.c 2015-09-05 05:53:19.085516467 +0200
+@@ -0,0 +1,280 @@
++/*
++ * Copyright (c) 2003, 2007-11 Matteo Frigo
++ * Copyright (c) 2003, 2007-11 Massachusetts Institute of Technology
++ * Copyright (c) 2012 Michael Pippig
++ *
++ * This program is free software; you can redistribute it and/or modify
++ * it under the terms of the GNU General Public License as published by
++ * the Free Software Foundation; either version 2 of the License, or
++ * (at your option) any later version.
++ *
++ * This program is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++ * GNU General Public License for more details.
++ *
++ * You should have received a copy of the GNU General Public License
++ * along with this program; if not, write to the Free Software
++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
++ *
++ */
++
++/* plans for distributed out-of-place transpose using MPI_Alltoall,
++ and which destroy the input array (also if TRANSPOSED_IN is used) */
++
++#include "mpi-transpose.h"
++#include <string.h>
++
++typedef struct {
++ solver super;
++ int copy_transposed_out; /* whether to copy the output for TRANSPOSED_OUT,
++ which makes the first transpose out-of-place
++ but costs an extra copy and requires us
++ to destroy the input */
++} S;
++
++typedef struct {
++ plan_mpi_transpose super;
++
++ plan *cld1, *cld2, *cld2rest, *cld3;
++
++ MPI_Comm comm;
++ int *send_block_sizes, *send_block_offsets;
++ int *recv_block_sizes, *recv_block_offsets;
++
++ INT rest_Ioff, rest_Ooff;
++
++ int equal_blocks;
++} P;
++
++/* transpose locally to get contiguous chunks
++ this may take two transposes if the block sizes are unequal
++ (3 subplans, two of which operate on disjoint data) */
++static void apply_pretranspose(
++ const P *ego, R *I, R *O
++ )
++{
++ plan_rdft *cld2, *cld2rest, *cld3;
++
++ cld3 = (plan_rdft *) ego->cld3;
++ if (cld3)
++ cld3->apply(ego->cld3, O, O);
++ /* else TRANSPOSED_IN is true and user wants I transposed */
++
++ cld2 = (plan_rdft *) ego->cld2;
++ cld2->apply(ego->cld2, I, O);
++ cld2rest = (plan_rdft *) ego->cld2rest;
++ if (cld2rest) {
++ cld2rest->apply(ego->cld2rest,
++ I + ego->rest_Ioff, O + ego->rest_Ooff);
++ }
++}
++
++static void apply(const plan *ego_, R *I, R *O)
++{
++ const P *ego = (const P *) ego_;
++ plan_rdft *cld1 = (plan_rdft *) ego->cld1;
++
++ if (cld1) {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, O);
++
++ /* transpose chunks globally */
++ if (ego->equal_blocks)
++ MPI_Alltoall(O, ego->send_block_sizes[0], FFTW_MPI_TYPE,
++ I, ego->recv_block_sizes[0], FFTW_MPI_TYPE,
++ ego->comm);
++ else
++ MPI_Alltoallv(O, ego->send_block_sizes, ego->send_block_offsets,
++ FFTW_MPI_TYPE,
++ I, ego->recv_block_sizes, ego->recv_block_offsets,
++ FFTW_MPI_TYPE,
++ ego->comm);
++
++ /* transpose locally to get non-transposed output */
++ cld1->apply(ego->cld1, I, O);
++ } /* else TRANSPOSED_OUT is true and user wants O transposed */
++ else {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, I);
++
++ /* transpose chunks globally */
++ if (ego->equal_blocks)
++ MPI_Alltoall(I, ego->send_block_sizes[0], FFTW_MPI_TYPE,
++ O, ego->recv_block_sizes[0], FFTW_MPI_TYPE,
++ ego->comm);
++ else
++ MPI_Alltoallv(I, ego->send_block_sizes, ego->send_block_offsets,
++ FFTW_MPI_TYPE,
++ O, ego->recv_block_sizes, ego->recv_block_offsets,
++ FFTW_MPI_TYPE,
++ ego->comm);
++ }
++}
++
++static int applicable(const S *ego, const problem *p_,
++ const planner *plnr)
++{
++ /* in contrast to transpose-alltoall this algorithm can not preserve the input,
++ * since we need at least one transpose before the (out-of-place) Alltoall */
++ const problem_mpi_transpose *p = (const problem_mpi_transpose *) p_;
++ return (1
++ && p->I != p->O
++ && (!NO_DESTROY_INPUTP(plnr))
++ && ((p->flags & TRANSPOSED_OUT) || !ego->copy_transposed_out)
++ && ONLY_TRANSPOSEDP(p->flags)
++ );
++}
++
++static void awake(plan *ego_, enum wakefulness wakefulness)
++{
++ P *ego = (P *) ego_;
++ X(plan_awake)(ego->cld1, wakefulness);
++ X(plan_awake)(ego->cld2, wakefulness);
++ X(plan_awake)(ego->cld2rest, wakefulness);
++ X(plan_awake)(ego->cld3, wakefulness);
++}
++
++static void destroy(plan *ego_)
++{
++ P *ego = (P *) ego_;
++ X(ifree0)(ego->send_block_sizes);
++ MPI_Comm_free(&ego->comm);
++ X(plan_destroy_internal)(ego->cld3);
++ X(plan_destroy_internal)(ego->cld2rest);
++ X(plan_destroy_internal)(ego->cld2);
++ X(plan_destroy_internal)(ego->cld1);
++}
++
++static void print(const plan *ego_, printer *p)
++{
++ const P *ego = (const P *) ego_;
++ p->print(p, "(mpi-transpose-alltoall-transposed%s%(%p%)%(%p%)%(%p%)%(%p%))",
++ ego->equal_blocks ? "/e" : "",
++ ego->cld1, ego->cld2, ego->cld2rest, ego->cld3);
++}
++
++static plan *mkplan(const solver *ego_, const problem *p_, planner *plnr)
++{
++ const S *ego = (const S *) ego_;
++ const problem_mpi_transpose *p;
++ P *pln;
++ plan *cld1 = 0, *cld2 = 0, *cld2rest = 0, *cld3 = 0;
++ INT b, bt, vn, rest_Ioff, rest_Ooff;
++ R *O;
++ int *sbs, *sbo, *rbs, *rbo;
++ int pe, my_pe, n_pes;
++ int equal_blocks = 1;
++ static const plan_adt padt = {
++ XM(transpose_solve), awake, print, destroy
++ };
++
++ if (!applicable(ego, p_, plnr))
++ return (plan *) 0;
++
++ p = (const problem_mpi_transpose *) p_;
++ vn = p->vn;
++
++ MPI_Comm_rank(p->comm, &my_pe);
++ MPI_Comm_size(p->comm, &n_pes);
++
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++
++ if (p->flags & TRANSPOSED_OUT) { /* O stays transposed */
++ if (ego->copy_transposed_out) {
++ cld1 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_1d)
++ (bt * p->nx * vn, 1, 1),
++ p->I, O = p->O),
++ 0, 0, NO_SLOW);
++ if (XM(any_true)(!cld1, p->comm)) goto nada;
++ }
++ else /* first transpose is in-place */
++ O = p->I;
++ }
++ else { /* transpose nx x bt x vn -> bt x nx x vn */
++ cld1 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_3d)
++ (bt, vn, p->nx * vn,
++ p->nx, bt * vn, vn,
++ vn, 1, 1),
++ p->I, O = p->O),
++ 0, 0, NO_SLOW);
++ if (XM(any_true)(!cld1, p->comm)) goto nada;
++ }
++
++ if (XM(any_true)(!XM(mkplans_pretranspose)(p, plnr, p->I, O, my_pe,
++ &cld2, &cld2rest, &cld3,
++ &rest_Ioff, &rest_Ooff),
++ p->comm)) goto nada;
++
++
++ pln = MKPLAN_MPI_TRANSPOSE(P, &padt, apply);
++
++ pln->cld1 = cld1;
++ pln->cld2 = cld2;
++ pln->cld2rest = cld2rest;
++ pln->rest_Ioff = rest_Ioff;
++ pln->rest_Ooff = rest_Ooff;
++ pln->cld3 = cld3;
++
++ MPI_Comm_dup(p->comm, &pln->comm);
++
++ /* Compute sizes/offsets of blocks to send for all-to-all command. */
++ sbs = (int *) MALLOC(4 * n_pes * sizeof(int), PLANS);
++ sbo = sbs + n_pes;
++ rbs = sbo + n_pes;
++ rbo = rbs + n_pes;
++ b = XM(block)(p->nx, p->block, my_pe);
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++ for (pe = 0; pe < n_pes; ++pe) {
++ INT db, dbt; /* destination block sizes */
++ db = XM(block)(p->nx, p->block, pe);
++ dbt = XM(block)(p->ny, p->tblock, pe);
++ if (db != p->block || dbt != p->tblock)
++ equal_blocks = 0;
++
++ /* MPI requires type "int" here; apparently it
++ has no 64-bit API? Grrr. */
++ sbs[pe] = (int) (b * dbt * vn);
++ sbo[pe] = (int) (pe * (b * p->tblock) * vn);
++ rbs[pe] = (int) (db * bt * vn);
++ rbo[pe] = (int) (pe * (p->block * bt) * vn);
++ }
++ pln->send_block_sizes = sbs;
++ pln->send_block_offsets = sbo;
++ pln->recv_block_sizes = rbs;
++ pln->recv_block_offsets = rbo;
++ pln->equal_blocks = equal_blocks;
++
++ X(ops_zero)(&pln->super.super.ops);
++ if (cld1) X(ops_add2)(&cld1->ops, &pln->super.super.ops);
++ if (cld2) X(ops_add2)(&cld2->ops, &pln->super.super.ops);
++ if (cld2rest) X(ops_add2)(&cld2rest->ops, &pln->super.super.ops);
++ if (cld3) X(ops_add2)(&cld3->ops, &pln->super.super.ops);
++ /* FIXME: should MPI operations be counted in "other" somehow? */
++
++ return &(pln->super.super);
++
++ nada:
++ X(plan_destroy_internal)(cld3);
++ X(plan_destroy_internal)(cld2rest);
++ X(plan_destroy_internal)(cld2);
++ X(plan_destroy_internal)(cld1);
++ return (plan *) 0;
++}
++
++static solver *mksolver(int copy_transposed_out)
++{
++ static const solver_adt sadt = { PROBLEM_MPI_TRANSPOSE, mkplan, 0 };
++ S *slv = MKSOLVER(S, &sadt);
++ slv->copy_transposed_out = copy_transposed_out;
++ return &(slv->super);
++}
++
++void XM(transpose_alltoall_transposed_register)(planner *p)
++{
++ int cto;
++ for (cto = 0; cto <= 1; ++cto)
++ REGISTER_SOLVER(p, mksolver(cto));
++}
+
+--- mpi/transpose-pairwise.c 2014-03-04 19:41:03.000000000 +0100
++++ mpi/transpose-pairwise.c 2015-09-05 06:00:05.715433709 +0200
+@@ -53,7 +53,6 @@ static void transpose_chunks(int *sched,
+ {
+ if (sched) {
+ int i;
+- MPI_Status status;
+
+ /* TODO: explore non-synchronous send/recv? */
+
+@@ -74,7 +73,7 @@ static void transpose_chunks(int *sched,
+ O + rbo[pe], (int) (rbs[pe]),
+ FFTW_MPI_TYPE,
+ pe, (pe * n_pes + my_pe) & 0xffff,
+- comm, &status);
++ comm, MPI_STATUS_IGNORE);
+ }
+ }
+
+@@ -92,7 +91,7 @@ static void transpose_chunks(int *sched,
+ O + rbo[pe], (int) (rbs[pe]),
+ FFTW_MPI_TYPE,
+ pe, (pe * n_pes + my_pe) & 0xffff,
+- comm, &status);
++ comm, MPI_STATUS_IGNORE);
+ }
+ }
+ }
+@@ -350,6 +349,7 @@ nada:
+ X(plan_destroy_internal)(*cld3);
+ X(plan_destroy_internal)(*cld2rest);
+ X(plan_destroy_internal)(*cld2);
++ *cld2 = *cld2rest = *cld3 = NULL;
+ return 0;
+ }
+
+--- mpi/transpose-pairwise-transposed.c 1970-01-01 01:00:00.000000000 +0100
++++ mpi/transpose-pairwise-transposed.c 2015-09-05 06:00:07.280481042 +0200
+@@ -0,0 +1,510 @@
++/*
++ * Copyright (c) 2003, 2007-11 Matteo Frigo
++ * Copyright (c) 2003, 2007-11 Massachusetts Institute of Technology
++ * Copyright (c) 2012 Michael Pippig
++ *
++ * This program is free software; you can redistribute it and/or modify
++ * it under the terms of the GNU General Public License as published by
++ * the Free Software Foundation; either version 2 of the License, or
++ * (at your option) any later version.
++ *
++ * This program is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++ * GNU General Public License for more details.
++ *
++ * You should have received a copy of the GNU General Public License
++ * along with this program; if not, write to the Free Software
++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
++ *
++ */
++
++/* Distributed transposes using a sequence of carefully scheduled
++ pairwise exchanges. This has the advantage that it can be done
++ in-place, or out-of-place while preserving the input, using buffer
++ space proportional to the local size divided by the number of
++ processes (i.e. to the total array size divided by the number of
++ processes squared). */
++
++#include "mpi-transpose.h"
++#include <string.h>
++
++typedef struct {
++ solver super;
++ int preserve_input; /* preserve input even if DESTROY_INPUT was passed */
++} S;
++
++typedef struct {
++ plan_mpi_transpose super;
++
++ plan *cld1, *cld2, *cld2rest, *cld3;
++ INT rest_Ioff, rest_Ooff;
++
++ int n_pes, my_pe, *sched;
++ INT *send_block_sizes, *send_block_offsets;
++ INT *recv_block_sizes, *recv_block_offsets;
++ MPI_Comm comm;
++ int preserve_input;
++} P;
++
++static void transpose_chunks(int *sched, int n_pes, int my_pe,
++ INT *sbs, INT *sbo, INT *rbs, INT *rbo,
++ MPI_Comm comm,
++ R *I, R *O)
++{
++ if (sched) {
++ int i;
++
++ /* TODO: explore non-synchronous send/recv? */
++
++ if (I == O) {
++ R *buf = (R*) MALLOC(sizeof(R) * sbs[0], BUFFERS);
++
++ for (i = 0; i < n_pes; ++i) {
++ int pe = sched[i];
++ if (my_pe == pe) {
++ if (rbo[pe] != sbo[pe])
++ memmove(O + rbo[pe], O + sbo[pe],
++ sbs[pe] * sizeof(R));
++ }
++ else {
++ memcpy(buf, O + sbo[pe], sbs[pe] * sizeof(R));
++ MPI_Sendrecv(buf, (int) (sbs[pe]), FFTW_MPI_TYPE,
++ pe, (my_pe * n_pes + pe) & 0xffff,
++ O + rbo[pe], (int) (rbs[pe]),
++ FFTW_MPI_TYPE,
++ pe, (pe * n_pes + my_pe) & 0xffff,
++ comm, MPI_STATUS_IGNORE);
++ }
++ }
++
++ X(ifree)(buf);
++ }
++ else { /* I != O */
++ for (i = 0; i < n_pes; ++i) {
++ int pe = sched[i];
++ if (my_pe == pe)
++ memcpy(O + rbo[pe], I + sbo[pe], sbs[pe] * sizeof(R));
++ else
++ MPI_Sendrecv(I + sbo[pe], (int) (sbs[pe]),
++ FFTW_MPI_TYPE,
++ pe, (my_pe * n_pes + pe) & 0xffff,
++ O + rbo[pe], (int) (rbs[pe]),
++ FFTW_MPI_TYPE,
++ pe, (pe * n_pes + my_pe) & 0xffff,
++ comm, MPI_STATUS_IGNORE);
++ }
++ }
++ }
++}
++
++/* transpose locally to get contiguous chunks
++ this may take two transposes if the block sizes are unequal
++ (3 subplans, two of which operate on disjoint data) */
++static void apply_pretranspose(
++ const P *ego, R *I, R *O
++ )
++{
++ plan_rdft *cld2, *cld2rest, *cld3;
++
++ cld3 = (plan_rdft *) ego->cld3;
++ if (cld3)
++ cld3->apply(ego->cld3, O, O);
++ /* else TRANSPOSED_IN is true and user wants I transposed */
++
++ cld2 = (plan_rdft *) ego->cld2;
++ cld2->apply(ego->cld2, I, O);
++ cld2rest = (plan_rdft *) ego->cld2rest;
++ if (cld2rest) {
++ cld2rest->apply(ego->cld2rest,
++ I + ego->rest_Ioff, O + ego->rest_Ooff);
++ }
++}
++
++static void apply(const plan *ego_, R *I, R *O)
++{
++ const P *ego = (const P *) ego_;
++ plan_rdft *cld1 = (plan_rdft *) ego->cld1;
++
++ if (cld1) {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, O);
++
++ if(ego->preserve_input) I = O;
++
++ /* transpose chunks globally */
++ transpose_chunks(ego->sched, ego->n_pes, ego->my_pe,
++ ego->send_block_sizes, ego->send_block_offsets,
++ ego->recv_block_sizes, ego->recv_block_offsets,
++ ego->comm, O, I);
++
++ /* transpose locally to get non-transposed output */
++ cld1->apply(ego->cld1, I, O);
++ } /* else TRANSPOSED_OUT is true and user wants O transposed */
++ else if (ego->preserve_input) {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, O);
++
++ /* transpose chunks globally */
++ transpose_chunks(ego->sched, ego->n_pes, ego->my_pe,
++ ego->send_block_sizes, ego->send_block_offsets,
++ ego->recv_block_sizes, ego->recv_block_offsets,
++ ego->comm, O, O);
++ }
++ else {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, I);
++
++ /* transpose chunks globally */
++ transpose_chunks(ego->sched, ego->n_pes, ego->my_pe,
++ ego->send_block_sizes, ego->send_block_offsets,
++ ego->recv_block_sizes, ego->recv_block_offsets,
++ ego->comm, I, O);
++ }
++}
++
++static int applicable(const S *ego, const problem *p_,
++ const planner *plnr)
++{
++ const problem_mpi_transpose *p = (const problem_mpi_transpose *) p_;
++ /* Note: this is *not* UGLY for out-of-place, destroy-input plans;
++ the planner often prefers transpose-pairwise to transpose-alltoall,
++ at least with LAM MPI on my machine. */
++ return (1
++ && (!ego->preserve_input || (!NO_DESTROY_INPUTP(plnr)
++ && p->I != p->O))
++ && ONLY_TRANSPOSEDP(p->flags));
++}
++
++static void awake(plan *ego_, enum wakefulness wakefulness)
++{
++ P *ego = (P *) ego_;
++ X(plan_awake)(ego->cld1, wakefulness);
++ X(plan_awake)(ego->cld2, wakefulness);
++ X(plan_awake)(ego->cld2rest, wakefulness);
++ X(plan_awake)(ego->cld3, wakefulness);
++}
++
++static void destroy(plan *ego_)
++{
++ P *ego = (P *) ego_;
++ X(ifree0)(ego->sched);
++ X(ifree0)(ego->send_block_sizes);
++ MPI_Comm_free(&ego->comm);
++ X(plan_destroy_internal)(ego->cld3);
++ X(plan_destroy_internal)(ego->cld2rest);
++ X(plan_destroy_internal)(ego->cld2);
++ X(plan_destroy_internal)(ego->cld1);
++}
++
++static void print(const plan *ego_, printer *p)
++{
++ const P *ego = (const P *) ego_;
++ p->print(p, "(mpi-transpose-pairwise-transposed%s%(%p%)%(%p%)%(%p%)%(%p%))",
++ ego->preserve_input==2 ?"/p":"",
++ ego->cld1, ego->cld2, ego->cld2rest, ego->cld3);
++}
++
++/* Given a process which_pe and a number of processes npes, fills
++ the array sched[npes] with a sequence of processes to communicate
++ with for a deadlock-free, optimum-overlap all-to-all communication.
++ (All processes must call this routine to get their own schedules.)
++ The schedule can be re-ordered arbitrarily as long as all processes
++ apply the same permutation to their schedules.
++
++ The algorithm here is based upon the one described in:
++ J. A. M. Schreuder, "Constructing timetables for sport
++ competitions," Mathematical Programming Study 13, pp. 58-67 (1980).
++ In a sport competition, you have N teams and want every team to
++ play every other team in as short a time as possible (maximum overlap
++ between games). This timetabling problem is therefore identical
++ to that of an all-to-all communications problem. In our case, there
++ is one wrinkle: as part of the schedule, the process must do
++ some data transfer with itself (local data movement), analogous
++ to a requirement that each team "play itself" in addition to other
++ teams. With this wrinkle, it turns out that an optimal timetable
++ (N parallel games) can be constructed for any N, not just for even
++ N as in the original problem described by Schreuder.
++*/
++static void fill1_comm_sched(int *sched, int which_pe, int npes)
++{
++ int pe, i, n, s = 0;
++ A(which_pe >= 0 && which_pe < npes);
++ if (npes % 2 == 0) {
++ n = npes;
++ sched[s++] = which_pe;
++ }
++ else
++ n = npes + 1;
++ for (pe = 0; pe < n - 1; ++pe) {
++ if (npes % 2 == 0) {
++ if (pe == which_pe) sched[s++] = npes - 1;
++ else if (npes - 1 == which_pe) sched[s++] = pe;
++ }
++ else if (pe == which_pe) sched[s++] = pe;
++
++ if (pe != which_pe && which_pe < n - 1) {
++ i = (pe - which_pe + (n - 1)) % (n - 1);
++ if (i < n/2)
++ sched[s++] = (pe + i) % (n - 1);
++
++ i = (which_pe - pe + (n - 1)) % (n - 1);
++ if (i < n/2)
++ sched[s++] = (pe - i + (n - 1)) % (n - 1);
++ }
++ }
++ A(s == npes);
++}
++
++/* Sort the communication schedule sched for npes so that the schedule
++ on process sortpe is ascending or descending (!ascending). This is
++ necessary to allow in-place transposes when the problem does not
++ divide equally among the processes. In this case there is one
++ process where the incoming blocks are bigger/smaller than the
++ outgoing blocks and thus have to be received in
++ descending/ascending order, respectively, to avoid overwriting data
++ before it is sent. */
++static void sort1_comm_sched(int *sched, int npes, int sortpe, int ascending)
++{
++ int *sortsched, i;
++ sortsched = (int *) MALLOC(npes * sizeof(int) * 2, OTHER);
++ fill1_comm_sched(sortsched, sortpe, npes);
++ if (ascending)
++ for (i = 0; i < npes; ++i)
++ sortsched[npes + sortsched[i]] = sched[i];
++ else
++ for (i = 0; i < npes; ++i)
++ sortsched[2*npes - 1 - sortsched[i]] = sched[i];
++ for (i = 0; i < npes; ++i)
++ sched[i] = sortsched[npes + i];
++ X(ifree)(sortsched);
++}
++
++/* make the plans to do the pre-MPI transpositions (shared with
++ transpose-alltoall-transposed) */
++int XM(mkplans_pretranspose)(const problem_mpi_transpose *p, planner *plnr,
++ R *I, R *O, int my_pe,
++ plan **cld2, plan **cld2rest, plan **cld3,
++ INT *rest_Ioff, INT *rest_Ooff)
++{
++ INT vn = p->vn;
++ INT b = XM(block)(p->nx, p->block, my_pe);
++ INT bt = p->tblock;
++ INT nyb = p->ny / bt; /* number of equal-sized blocks */
++ INT nyr = p->ny - nyb * bt; /* leftover rows after equal blocks */
++
++ *cld2 = *cld2rest = *cld3 = NULL;
++ *rest_Ioff = *rest_Ooff = 0;
++
++ if (!(p->flags & TRANSPOSED_IN) && (nyr == 0 || I != O)) {
++ INT ny = p->ny * vn;
++ bt *= vn;
++ *cld2 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_3d)
++ (nyb, bt, b * bt,
++ b, ny, bt,
++ bt, 1, 1),
++ I, O),
++ 0, 0, NO_SLOW);
++ if (!*cld2) goto nada;
++
++ if (nyr > 0) {
++ *rest_Ioff = nyb * bt;
++ *rest_Ooff = nyb * b * bt;
++ bt = nyr * vn;
++ *cld2rest = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_2d)
++ (b, ny, bt,
++ bt, 1, 1),
++ I + *rest_Ioff,
++ O + *rest_Ooff),
++ 0, 0, NO_SLOW);
++ if (!*cld2rest) goto nada;
++ }
++ }
++ else {
++ *cld2 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(
++ X(mktensor_4d)
++ (nyb, b * bt * vn, b * bt * vn,
++ b, vn, bt * vn,
++ bt, b * vn, vn,
++ vn, 1, 1),
++ I, O),
++ 0, 0, NO_SLOW);
++ if (!*cld2) goto nada;
++
++ *rest_Ioff = *rest_Ooff = nyb * bt * b * vn;
++ *cld2rest = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(
++ X(mktensor_3d)
++ (b, vn, nyr * vn,
++ nyr, b * vn, vn,
++ vn, 1, 1),
++ I + *rest_Ioff, O + *rest_Ooff),
++ 0, 0, NO_SLOW);
++ if (!*cld2rest) goto nada;
++
++ if (!(p->flags & TRANSPOSED_IN)) {
++ *cld3 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(
++ X(mktensor_3d)
++ (p->ny, vn, b * vn,
++ b, p->ny * vn, vn,
++ vn, 1, 1),
++ I, I),
++ 0, 0, NO_SLOW);
++ if (!*cld3) goto nada;
++ }
++ }
++
++ return 1;
++
++nada:
++ X(plan_destroy_internal)(*cld3);
++ X(plan_destroy_internal)(*cld2rest);
++ X(plan_destroy_internal)(*cld2);
++ *cld2 = *cld2rest = *cld3 = NULL;
++ return 0;
++}
++
++static plan *mkplan(const solver *ego_, const problem *p_, planner *plnr)
++{
++ const S *ego = (const S *) ego_;
++ const problem_mpi_transpose *p;
++ P *pln;
++ plan *cld1 = 0, *cld2 = 0, *cld2rest = 0, *cld3 = 0;
++ INT b, bt, vn, rest_Ioff, rest_Ooff;
++ INT *sbs, *sbo, *rbs, *rbo;
++ int pe, my_pe, n_pes, sort_pe = -1, ascending = 1;
++ R *I, *O;
++ static const plan_adt padt = {
++ XM(transpose_solve), awake, print, destroy
++ };
++
++ UNUSED(ego);
++
++ if (!applicable(ego, p_, plnr))
++ return (plan *) 0;
++
++ p = (const problem_mpi_transpose *) p_;
++ vn = p->vn;
++ I = p->I; O = p->O;
++
++ MPI_Comm_rank(p->comm, &my_pe);
++ MPI_Comm_size(p->comm, &n_pes);
++
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++
++
++ if (ego->preserve_input || NO_DESTROY_INPUTP(plnr)) I = p->O;
++
++ if (!(p->flags & TRANSPOSED_OUT)) { /* nx x bt x vn -> bt x nx x vn */
++ cld1 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_3d)
++ (bt, vn, p->nx * vn,
++ p->nx, bt * vn, vn,
++ vn, 1, 1),
++ I, O = p->O),
++ 0, 0, NO_SLOW);
++ if (XM(any_true)(!cld1, p->comm)) goto nada;
++
++ }
++ else {
++ if (ego->preserve_input || NO_DESTROY_INPUTP(plnr))
++ O = p->O;
++ else
++ O = p->I;
++ }
++
++ if (XM(any_true)(!XM(mkplans_pretranspose)(p, plnr, p->I, O, my_pe,
++ &cld2, &cld2rest, &cld3,
++ &rest_Ioff, &rest_Ooff),
++ p->comm)) goto nada;
++
++ pln = MKPLAN_MPI_TRANSPOSE(P, &padt, apply);
++
++ pln->cld1 = cld1;
++ pln->cld2 = cld2;
++ pln->cld2rest = cld2rest;
++ pln->rest_Ioff = rest_Ioff;
++ pln->rest_Ooff = rest_Ooff;
++ pln->cld3 = cld3;
++ pln->preserve_input = ego->preserve_input ? 2 : NO_DESTROY_INPUTP(plnr);
++
++ MPI_Comm_dup(p->comm, &pln->comm);
++
++ n_pes = (int) X(imax)(XM(num_blocks)(p->nx, p->block),
++ XM(num_blocks)(p->ny, p->tblock));
++
++ /* Compute sizes/offsets of blocks to exchange between processors */
++ sbs = (INT *) MALLOC(4 * n_pes * sizeof(INT), PLANS);
++ sbo = sbs + n_pes;
++ rbs = sbo + n_pes;
++ rbo = rbs + n_pes;
++ b = XM(block)(p->nx, p->block, my_pe);
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++ for (pe = 0; pe < n_pes; ++pe) {
++ INT db, dbt; /* destination block sizes */
++ db = XM(block)(p->nx, p->block, pe);
++ dbt = XM(block)(p->ny, p->tblock, pe);
++
++ sbs[pe] = b * dbt * vn;
++ sbo[pe] = pe * (b * p->tblock) * vn;
++ rbs[pe] = db * bt * vn;
++ rbo[pe] = pe * (p->block * bt) * vn;
++
++ if (db * dbt > 0 && db * p->tblock != p->block * dbt) {
++ A(sort_pe == -1); /* only one process should need sorting */
++ sort_pe = pe;
++ ascending = db * p->tblock > p->block * dbt;
++ }
++ }
++ pln->n_pes = n_pes;
++ pln->my_pe = my_pe;
++ pln->send_block_sizes = sbs;
++ pln->send_block_offsets = sbo;
++ pln->recv_block_sizes = rbs;
++ pln->recv_block_offsets = rbo;
++
++ if (my_pe >= n_pes) {
++ pln->sched = 0; /* this process is not doing anything */
++ }
++ else {
++ pln->sched = (int *) MALLOC(n_pes * sizeof(int), PLANS);
++ fill1_comm_sched(pln->sched, my_pe, n_pes);
++ if (sort_pe >= 0)
++ sort1_comm_sched(pln->sched, n_pes, sort_pe, ascending);
++ }
++
++ X(ops_zero)(&pln->super.super.ops);
++ if (cld1) X(ops_add2)(&cld1->ops, &pln->super.super.ops);
++ if (cld2) X(ops_add2)(&cld2->ops, &pln->super.super.ops);
++ if (cld2rest) X(ops_add2)(&cld2rest->ops, &pln->super.super.ops);
++ if (cld3) X(ops_add2)(&cld3->ops, &pln->super.super.ops);
++ /* FIXME: should MPI operations be counted in "other" somehow? */
++
++ return &(pln->super.super);
++
++ nada:
++ X(plan_destroy_internal)(cld3);
++ X(plan_destroy_internal)(cld2rest);
++ X(plan_destroy_internal)(cld2);
++ X(plan_destroy_internal)(cld1);
++ return (plan *) 0;
++}
++
++static solver *mksolver(int preserve_input)
++{
++ static const solver_adt sadt = { PROBLEM_MPI_TRANSPOSE, mkplan, 0 };
++ S *slv = MKSOLVER(S, &sadt);
++ slv->preserve_input = preserve_input;
++ return &(slv->super);
++}
++
++void XM(transpose_pairwise_transposed_register)(planner *p)
++{
++ int preserve_input;
++ for (preserve_input = 0; preserve_input <= 1; ++preserve_input)
++ REGISTER_SOLVER(p, mksolver(preserve_input));
++}
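To make the scheduling logic above easier to follow, here is a minimal, self-contained sketch (illustrative only, not part of the patch): a Python port of fill1_comm_sched and sort1_comm_sched, together with a check of the two properties the comments promise, namely that every round of the schedule is a perfect pairing (my partner at step k also lists me at step k, which is what keeps the MPI_Sendrecv loop deadlock-free) and that sorting with a common sortpe leaves that rank's schedule ascending while preserving the pairings.

def fill1_comm_sched(which_pe, npes):
    """Round-robin ("circle method") schedule; a direct Python port of the C above."""
    sched = []
    if npes % 2 == 0:
        n = npes
        sched.append(which_pe)              # even npes: round 0 is the self-exchange
    else:
        n = npes + 1                        # odd npes: pad with a phantom opponent
    for pe in range(n - 1):
        if npes % 2 == 0:
            if pe == which_pe:
                sched.append(npes - 1)
            elif npes - 1 == which_pe:
                sched.append(pe)
        elif pe == which_pe:
            sched.append(pe)                # odd npes: the self-exchange lands here
        if pe != which_pe and which_pe < n - 1:
            i = (pe - which_pe + (n - 1)) % (n - 1)
            if i < n // 2:
                sched.append((pe + i) % (n - 1))
            i = (which_pe - pe + (n - 1)) % (n - 1)
            if i < n // 2:
                sched.append((pe - i + (n - 1)) % (n - 1))
    assert len(sched) == npes
    return sched


def sort1_comm_sched(sched, npes, sortpe, ascending=True):
    """Permute sched so rank sortpe sees its partners in ascending (or descending) order."""
    ref = fill1_comm_sched(sortpe, npes)
    out = [None] * npes
    for i in range(npes):
        out[ref[i] if ascending else npes - 1 - ref[i]] = sched[i]
    return out


if __name__ == '__main__':
    for npes in (2, 3, 4, 5, 8):
        scheds = [fill1_comm_sched(pe, npes) for pe in range(npes)]
        for step in range(npes):            # each round is a perfect pairing
            for pe in range(npes):
                assert scheds[scheds[pe][step]][step] == pe
        sortpe = npes - 1
        resorted = [sort1_comm_sched(s, npes, sortpe) for s in scheds]
        assert resorted[sortpe] == list(range(npes))
        for step in range(npes):            # the common permutation keeps the pairings
            for pe in range(npes):
                assert resorted[resorted[pe][step]][step] == pe
    print("schedule checks passed")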
diff --git a/var/spack/repos/builtin/packages/fftw/pfft-3.3.5.patch b/var/spack/repos/builtin/packages/fftw/pfft-3.3.5.patch
new file mode 100644
index 0000000000..360a3757f9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fftw/pfft-3.3.5.patch
@@ -0,0 +1,858 @@
+--- mpi/conf.c 2014-03-04 19:41:03.000000000 +0100
++++ mpi/conf.c 2015-09-05 05:53:19.085516467 +0200
+@@ -29,6 +29,8 @@ static const solvtab s =
+ SOLVTAB(XM(transpose_pairwise_register)),
+ SOLVTAB(XM(transpose_alltoall_register)),
+ SOLVTAB(XM(transpose_recurse_register)),
++ SOLVTAB(XM(transpose_pairwise_transposed_register)),
++ SOLVTAB(XM(transpose_alltoall_transposed_register)),
+ SOLVTAB(XM(dft_rank_geq2_register)),
+ SOLVTAB(XM(dft_rank_geq2_transposed_register)),
+ SOLVTAB(XM(dft_serial_register)),
+
+--- mpi/Makefile.am 2013-03-18 13:10:45.000000000 +0100
++++ mpi/Makefile.am 2015-09-05 05:53:19.084516437 +0200
+@@ -16,6 +16,7 @@ BUILT_SOURCES = fftw3-mpi.f03.in fftw3-m
+ CLEANFILES = fftw3-mpi.f03 fftw3l-mpi.f03
+
+ TRANSPOSE_SRC = transpose-alltoall.c transpose-pairwise.c transpose-recurse.c transpose-problem.c transpose-solve.c mpi-transpose.h
++TRANSPOSE_SRC += transpose-alltoall-transposed.c transpose-pairwise-transposed.c
+ DFT_SRC = dft-serial.c dft-rank-geq2.c dft-rank-geq2-transposed.c dft-rank1.c dft-rank1-bigvec.c dft-problem.c dft-solve.c mpi-dft.h
+ RDFT_SRC = rdft-serial.c rdft-rank-geq2.c rdft-rank-geq2-transposed.c rdft-rank1-bigvec.c rdft-problem.c rdft-solve.c mpi-rdft.h
+ RDFT2_SRC = rdft2-serial.c rdft2-rank-geq2.c rdft2-rank-geq2-transposed.c rdft2-problem.c rdft2-solve.c mpi-rdft2.h
+
+--- mpi/mpi-transpose.h 2014-03-04 19:41:03.000000000 +0100
++++ mpi/mpi-transpose.h 2015-09-05 05:53:19.085516467 +0200
+@@ -59,3 +59,5 @@ int XM(mkplans_posttranspose)(const prob
+ void XM(transpose_pairwise_register)(planner *p);
+ void XM(transpose_alltoall_register)(planner *p);
+ void XM(transpose_recurse_register)(planner *p);
++void XM(transpose_pairwise_transposed_register)(planner *p);
++void XM(transpose_alltoall_transposed_register)(planner *p);
+
+--- mpi/transpose-alltoall-transposed.c 1970-01-01 01:00:00.000000000 +0100
++++ mpi/transpose-alltoall-transposed.c 2015-09-05 05:53:19.085516467 +0200
+@@ -0,0 +1,280 @@
++/*
++ * Copyright (c) 2003, 2007-11 Matteo Frigo
++ * Copyright (c) 2003, 2007-11 Massachusetts Institute of Technology
++ * Copyright (c) 2012 Michael Pippig
++ *
++ * This program is free software; you can redistribute it and/or modify
++ * it under the terms of the GNU General Public License as published by
++ * the Free Software Foundation; either version 2 of the License, or
++ * (at your option) any later version.
++ *
++ * This program is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++ * GNU General Public License for more details.
++ *
++ * You should have received a copy of the GNU General Public License
++ * along with this program; if not, write to the Free Software
++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
++ *
++ */
++
++/* plans for distributed out-of-place transpose using MPI_Alltoall,
++ and which destroy the input array (even if TRANSPOSED_IN is used) */
++
++#include "mpi-transpose.h"
++#include <string.h>
++
++typedef struct {
++ solver super;
++ int copy_transposed_out; /* whether to copy the output for TRANSPOSED_OUT,
++ which makes the first transpose out-of-place
++ but costs an extra copy and requires us
++ to destroy the input */
++} S;
++
++typedef struct {
++ plan_mpi_transpose super;
++
++ plan *cld1, *cld2, *cld2rest, *cld3;
++
++ MPI_Comm comm;
++ int *send_block_sizes, *send_block_offsets;
++ int *recv_block_sizes, *recv_block_offsets;
++
++ INT rest_Ioff, rest_Ooff;
++
++ int equal_blocks;
++} P;
++
++/* transpose locally to get contiguous chunks
++ this may take two transposes if the block sizes are unequal
++ (3 subplans, two of which operate on disjoint data) */
++static void apply_pretranspose(
++ const P *ego, R *I, R *O
++ )
++{
++ plan_rdft *cld2, *cld2rest, *cld3;
++
++ cld3 = (plan_rdft *) ego->cld3;
++ if (cld3)
++ cld3->apply(ego->cld3, O, O);
++ /* else TRANSPOSED_IN is true and user wants I transposed */
++
++ cld2 = (plan_rdft *) ego->cld2;
++ cld2->apply(ego->cld2, I, O);
++ cld2rest = (plan_rdft *) ego->cld2rest;
++ if (cld2rest) {
++ cld2rest->apply(ego->cld2rest,
++ I + ego->rest_Ioff, O + ego->rest_Ooff);
++ }
++}
++
++static void apply(const plan *ego_, R *I, R *O)
++{
++ const P *ego = (const P *) ego_;
++ plan_rdft *cld1 = (plan_rdft *) ego->cld1;
++
++ if (cld1) {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, O);
++
++ /* transpose chunks globally */
++ if (ego->equal_blocks)
++ MPI_Alltoall(O, ego->send_block_sizes[0], FFTW_MPI_TYPE,
++ I, ego->recv_block_sizes[0], FFTW_MPI_TYPE,
++ ego->comm);
++ else
++ MPI_Alltoallv(O, ego->send_block_sizes, ego->send_block_offsets,
++ FFTW_MPI_TYPE,
++ I, ego->recv_block_sizes, ego->recv_block_offsets,
++ FFTW_MPI_TYPE,
++ ego->comm);
++
++ /* transpose locally to get non-transposed output */
++ cld1->apply(ego->cld1, I, O);
++ } /* else TRANSPOSED_OUT is true and user wants O transposed */
++ else {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, I);
++
++ /* transpose chunks globally */
++ if (ego->equal_blocks)
++ MPI_Alltoall(I, ego->send_block_sizes[0], FFTW_MPI_TYPE,
++ O, ego->recv_block_sizes[0], FFTW_MPI_TYPE,
++ ego->comm);
++ else
++ MPI_Alltoallv(I, ego->send_block_sizes, ego->send_block_offsets,
++ FFTW_MPI_TYPE,
++ O, ego->recv_block_sizes, ego->recv_block_offsets,
++ FFTW_MPI_TYPE,
++ ego->comm);
++ }
++}
++
++static int applicable(const S *ego, const problem *p_,
++ const planner *plnr)
++{
++ /* in contrast to transpose-alltoall, this algorithm cannot preserve the input,
++ * since we need at least one transpose before the (out-of-place) Alltoall */
++ const problem_mpi_transpose *p = (const problem_mpi_transpose *) p_;
++ return (1
++ && p->I != p->O
++ && (!NO_DESTROY_INPUTP(plnr))
++ && ((p->flags & TRANSPOSED_OUT) || !ego->copy_transposed_out)
++ && ONLY_TRANSPOSEDP(p->flags)
++ );
++}
++
++static void awake(plan *ego_, enum wakefulness wakefulness)
++{
++ P *ego = (P *) ego_;
++ X(plan_awake)(ego->cld1, wakefulness);
++ X(plan_awake)(ego->cld2, wakefulness);
++ X(plan_awake)(ego->cld2rest, wakefulness);
++ X(plan_awake)(ego->cld3, wakefulness);
++}
++
++static void destroy(plan *ego_)
++{
++ P *ego = (P *) ego_;
++ X(ifree0)(ego->send_block_sizes);
++ MPI_Comm_free(&ego->comm);
++ X(plan_destroy_internal)(ego->cld3);
++ X(plan_destroy_internal)(ego->cld2rest);
++ X(plan_destroy_internal)(ego->cld2);
++ X(plan_destroy_internal)(ego->cld1);
++}
++
++static void print(const plan *ego_, printer *p)
++{
++ const P *ego = (const P *) ego_;
++ p->print(p, "(mpi-transpose-alltoall-transposed%s%(%p%)%(%p%)%(%p%)%(%p%))",
++ ego->equal_blocks ? "/e" : "",
++ ego->cld1, ego->cld2, ego->cld2rest, ego->cld3);
++}
++
++static plan *mkplan(const solver *ego_, const problem *p_, planner *plnr)
++{
++ const S *ego = (const S *) ego_;
++ const problem_mpi_transpose *p;
++ P *pln;
++ plan *cld1 = 0, *cld2 = 0, *cld2rest = 0, *cld3 = 0;
++ INT b, bt, vn, rest_Ioff, rest_Ooff;
++ R *O;
++ int *sbs, *sbo, *rbs, *rbo;
++ int pe, my_pe, n_pes;
++ int equal_blocks = 1;
++ static const plan_adt padt = {
++ XM(transpose_solve), awake, print, destroy
++ };
++
++ if (!applicable(ego, p_, plnr))
++ return (plan *) 0;
++
++ p = (const problem_mpi_transpose *) p_;
++ vn = p->vn;
++
++ MPI_Comm_rank(p->comm, &my_pe);
++ MPI_Comm_size(p->comm, &n_pes);
++
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++
++ if (p->flags & TRANSPOSED_OUT) { /* O stays transposed */
++ if (ego->copy_transposed_out) {
++ cld1 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_1d)
++ (bt * p->nx * vn, 1, 1),
++ p->I, O = p->O),
++ 0, 0, NO_SLOW);
++ if (XM(any_true)(!cld1, p->comm)) goto nada;
++ }
++ else /* first transpose is in-place */
++ O = p->I;
++ }
++ else { /* transpose nx x bt x vn -> bt x nx x vn */
++ cld1 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_3d)
++ (bt, vn, p->nx * vn,
++ p->nx, bt * vn, vn,
++ vn, 1, 1),
++ p->I, O = p->O),
++ 0, 0, NO_SLOW);
++ if (XM(any_true)(!cld1, p->comm)) goto nada;
++ }
++
++ if (XM(any_true)(!XM(mkplans_pretranspose)(p, plnr, p->I, O, my_pe,
++ &cld2, &cld2rest, &cld3,
++ &rest_Ioff, &rest_Ooff),
++ p->comm)) goto nada;
++
++
++ pln = MKPLAN_MPI_TRANSPOSE(P, &padt, apply);
++
++ pln->cld1 = cld1;
++ pln->cld2 = cld2;
++ pln->cld2rest = cld2rest;
++ pln->rest_Ioff = rest_Ioff;
++ pln->rest_Ooff = rest_Ooff;
++ pln->cld3 = cld3;
++
++ MPI_Comm_dup(p->comm, &pln->comm);
++
++ /* Compute sizes/offsets of blocks to send for all-to-all command. */
++ sbs = (int *) MALLOC(4 * n_pes * sizeof(int), PLANS);
++ sbo = sbs + n_pes;
++ rbs = sbo + n_pes;
++ rbo = rbs + n_pes;
++ b = XM(block)(p->nx, p->block, my_pe);
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++ for (pe = 0; pe < n_pes; ++pe) {
++ INT db, dbt; /* destination block sizes */
++ db = XM(block)(p->nx, p->block, pe);
++ dbt = XM(block)(p->ny, p->tblock, pe);
++ if (db != p->block || dbt != p->tblock)
++ equal_blocks = 0;
++
++ /* MPI requires type "int" here; apparently it
++ has no 64-bit API? Grrr. */
++ sbs[pe] = (int) (b * dbt * vn);
++ sbo[pe] = (int) (pe * (b * p->tblock) * vn);
++ rbs[pe] = (int) (db * bt * vn);
++ rbo[pe] = (int) (pe * (p->block * bt) * vn);
++ }
++ pln->send_block_sizes = sbs;
++ pln->send_block_offsets = sbo;
++ pln->recv_block_sizes = rbs;
++ pln->recv_block_offsets = rbo;
++ pln->equal_blocks = equal_blocks;
++
++ X(ops_zero)(&pln->super.super.ops);
++ if (cld1) X(ops_add2)(&cld1->ops, &pln->super.super.ops);
++ if (cld2) X(ops_add2)(&cld2->ops, &pln->super.super.ops);
++ if (cld2rest) X(ops_add2)(&cld2rest->ops, &pln->super.super.ops);
++ if (cld3) X(ops_add2)(&cld3->ops, &pln->super.super.ops);
++ /* FIXME: should MPI operations be counted in "other" somehow? */
++
++ return &(pln->super.super);
++
++ nada:
++ X(plan_destroy_internal)(cld3);
++ X(plan_destroy_internal)(cld2rest);
++ X(plan_destroy_internal)(cld2);
++ X(plan_destroy_internal)(cld1);
++ return (plan *) 0;
++}
++
++static solver *mksolver(int copy_transposed_out)
++{
++ static const solver_adt sadt = { PROBLEM_MPI_TRANSPOSE, mkplan, 0 };
++ S *slv = MKSOLVER(S, &sadt);
++ slv->copy_transposed_out = copy_transposed_out;
++ return &(slv->super);
++}
++
++void XM(transpose_alltoall_transposed_register)(planner *p)
++{
++ int cto;
++ for (cto = 0; cto <= 1; ++cto)
++ REGISTER_SOLVER(p, mksolver(cto));
++}
+
+--- mpi/transpose-pairwise.c 2014-03-04 19:41:03.000000000 +0100
++++ mpi/transpose-pairwise.c 2015-09-05 06:00:05.715433709 +0200
+@@ -53,7 +53,6 @@ static void transpose_chunks(int *sched,
+ {
+ if (sched) {
+ int i;
+- MPI_Status status;
+
+ /* TODO: explore non-synchronous send/recv? */
+
+@@ -74,7 +73,7 @@ static void transpose_chunks(int *sched,
+ O + rbo[pe], (int) (rbs[pe]),
+ FFTW_MPI_TYPE,
+ pe, (pe * n_pes + my_pe) & 0xffff,
+- comm, &status);
++ comm, MPI_STATUS_IGNORE);
+ }
+ }
+
+@@ -92,7 +91,7 @@ static void transpose_chunks(int *sched,
+ O + rbo[pe], (int) (rbs[pe]),
+ FFTW_MPI_TYPE,
+ pe, (pe * n_pes + my_pe) & 0xffff,
+- comm, &status);
++ comm, MPI_STATUS_IGNORE);
+ }
+ }
+ }
+
+--- mpi/transpose-pairwise-transposed.c 1970-01-01 01:00:00.000000000 +0100
++++ mpi/transpose-pairwise-transposed.c 2015-09-05 06:00:07.280481042 +0200
+@@ -0,0 +1,510 @@
++/*
++ * Copyright (c) 2003, 2007-11 Matteo Frigo
++ * Copyright (c) 2003, 2007-11 Massachusetts Institute of Technology
++ * Copyright (c) 2012 Michael Pippig
++ *
++ * This program is free software; you can redistribute it and/or modify
++ * it under the terms of the GNU General Public License as published by
++ * the Free Software Foundation; either version 2 of the License, or
++ * (at your option) any later version.
++ *
++ * This program is distributed in the hope that it will be useful,
++ * but WITHOUT ANY WARRANTY; without even the implied warranty of
++ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
++ * GNU General Public License for more details.
++ *
++ * You should have received a copy of the GNU General Public License
++ * along with this program; if not, write to the Free Software
++ * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
++ *
++ */
++
++/* Distributed transposes using a sequence of carefully scheduled
++ pairwise exchanges. This has the advantage that it can be done
++ in-place, or out-of-place while preserving the input, using buffer
++ space proportional to the local size divided by the number of
++ processes (i.e. to the total array size divided by the number of
++ processes squared). */
++
++#include "mpi-transpose.h"
++#include <string.h>
++
++typedef struct {
++ solver super;
++ int preserve_input; /* preserve input even if DESTROY_INPUT was passed */
++} S;
++
++typedef struct {
++ plan_mpi_transpose super;
++
++ plan *cld1, *cld2, *cld2rest, *cld3;
++ INT rest_Ioff, rest_Ooff;
++
++ int n_pes, my_pe, *sched;
++ INT *send_block_sizes, *send_block_offsets;
++ INT *recv_block_sizes, *recv_block_offsets;
++ MPI_Comm comm;
++ int preserve_input;
++} P;
++
++static void transpose_chunks(int *sched, int n_pes, int my_pe,
++ INT *sbs, INT *sbo, INT *rbs, INT *rbo,
++ MPI_Comm comm,
++ R *I, R *O)
++{
++ if (sched) {
++ int i;
++
++ /* TODO: explore non-synchronous send/recv? */
++
++ if (I == O) {
++ R *buf = (R*) MALLOC(sizeof(R) * sbs[0], BUFFERS);
++
++ for (i = 0; i < n_pes; ++i) {
++ int pe = sched[i];
++ if (my_pe == pe) {
++ if (rbo[pe] != sbo[pe])
++ memmove(O + rbo[pe], O + sbo[pe],
++ sbs[pe] * sizeof(R));
++ }
++ else {
++ memcpy(buf, O + sbo[pe], sbs[pe] * sizeof(R));
++ MPI_Sendrecv(buf, (int) (sbs[pe]), FFTW_MPI_TYPE,
++ pe, (my_pe * n_pes + pe) & 0xffff,
++ O + rbo[pe], (int) (rbs[pe]),
++ FFTW_MPI_TYPE,
++ pe, (pe * n_pes + my_pe) & 0xffff,
++ comm, MPI_STATUS_IGNORE);
++ }
++ }
++
++ X(ifree)(buf);
++ }
++ else { /* I != O */
++ for (i = 0; i < n_pes; ++i) {
++ int pe = sched[i];
++ if (my_pe == pe)
++ memcpy(O + rbo[pe], I + sbo[pe], sbs[pe] * sizeof(R));
++ else
++ MPI_Sendrecv(I + sbo[pe], (int) (sbs[pe]),
++ FFTW_MPI_TYPE,
++ pe, (my_pe * n_pes + pe) & 0xffff,
++ O + rbo[pe], (int) (rbs[pe]),
++ FFTW_MPI_TYPE,
++ pe, (pe * n_pes + my_pe) & 0xffff,
++ comm, MPI_STATUS_IGNORE);
++ }
++ }
++ }
++}
++
++/* transpose locally to get contiguous chunks
++ this may take two transposes if the block sizes are unequal
++ (3 subplans, two of which operate on disjoint data) */
++static void apply_pretranspose(
++ const P *ego, R *I, R *O
++ )
++{
++ plan_rdft *cld2, *cld2rest, *cld3;
++
++ cld3 = (plan_rdft *) ego->cld3;
++ if (cld3)
++ cld3->apply(ego->cld3, O, O);
++ /* else TRANSPOSED_IN is true and user wants I transposed */
++
++ cld2 = (plan_rdft *) ego->cld2;
++ cld2->apply(ego->cld2, I, O);
++ cld2rest = (plan_rdft *) ego->cld2rest;
++ if (cld2rest) {
++ cld2rest->apply(ego->cld2rest,
++ I + ego->rest_Ioff, O + ego->rest_Ooff);
++ }
++}
++
++static void apply(const plan *ego_, R *I, R *O)
++{
++ const P *ego = (const P *) ego_;
++ plan_rdft *cld1 = (plan_rdft *) ego->cld1;
++
++ if (cld1) {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, O);
++
++ if(ego->preserve_input) I = O;
++
++ /* transpose chunks globally */
++ transpose_chunks(ego->sched, ego->n_pes, ego->my_pe,
++ ego->send_block_sizes, ego->send_block_offsets,
++ ego->recv_block_sizes, ego->recv_block_offsets,
++ ego->comm, O, I);
++
++ /* transpose locally to get non-transposed output */
++ cld1->apply(ego->cld1, I, O);
++ } /* else TRANSPOSED_OUT is true and user wants O transposed */
++ else if (ego->preserve_input) {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, O);
++
++ /* transpose chunks globally */
++ transpose_chunks(ego->sched, ego->n_pes, ego->my_pe,
++ ego->send_block_sizes, ego->send_block_offsets,
++ ego->recv_block_sizes, ego->recv_block_offsets,
++ ego->comm, O, O);
++ }
++ else {
++ /* transpose locally to get contiguous chunks */
++ apply_pretranspose(ego, I, I);
++
++ /* transpose chunks globally */
++ transpose_chunks(ego->sched, ego->n_pes, ego->my_pe,
++ ego->send_block_sizes, ego->send_block_offsets,
++ ego->recv_block_sizes, ego->recv_block_offsets,
++ ego->comm, I, O);
++ }
++}
++
++static int applicable(const S *ego, const problem *p_,
++ const planner *plnr)
++{
++ const problem_mpi_transpose *p = (const problem_mpi_transpose *) p_;
++ /* Note: this is *not* UGLY for out-of-place, destroy-input plans;
++ the planner often prefers transpose-pairwise to transpose-alltoall,
++ at least with LAM MPI on my machine. */
++ return (1
++ && (!ego->preserve_input || (!NO_DESTROY_INPUTP(plnr)
++ && p->I != p->O))
++ && ONLY_TRANSPOSEDP(p->flags));
++}
++
++static void awake(plan *ego_, enum wakefulness wakefulness)
++{
++ P *ego = (P *) ego_;
++ X(plan_awake)(ego->cld1, wakefulness);
++ X(plan_awake)(ego->cld2, wakefulness);
++ X(plan_awake)(ego->cld2rest, wakefulness);
++ X(plan_awake)(ego->cld3, wakefulness);
++}
++
++static void destroy(plan *ego_)
++{
++ P *ego = (P *) ego_;
++ X(ifree0)(ego->sched);
++ X(ifree0)(ego->send_block_sizes);
++ MPI_Comm_free(&ego->comm);
++ X(plan_destroy_internal)(ego->cld3);
++ X(plan_destroy_internal)(ego->cld2rest);
++ X(plan_destroy_internal)(ego->cld2);
++ X(plan_destroy_internal)(ego->cld1);
++}
++
++static void print(const plan *ego_, printer *p)
++{
++ const P *ego = (const P *) ego_;
++ p->print(p, "(mpi-transpose-pairwise-transposed%s%(%p%)%(%p%)%(%p%)%(%p%))",
++ ego->preserve_input==2 ?"/p":"",
++ ego->cld1, ego->cld2, ego->cld2rest, ego->cld3);
++}
++
++/* Given a process which_pe and a number of processes npes, fills
++ the array sched[npes] with a sequence of processes to communicate
++ with for a deadlock-free, optimum-overlap all-to-all communication.
++ (All processes must call this routine to get their own schedules.)
++ The schedule can be re-ordered arbitrarily as long as all processes
++ apply the same permutation to their schedules.
++
++ The algorithm here is based upon the one described in:
++ J. A. M. Schreuder, "Constructing timetables for sport
++ competitions," Mathematical Programming Study 13, pp. 58-67 (1980).
++ In a sport competition, you have N teams and want every team to
++ play every other team in as short a time as possible (maximum overlap
++ between games). This timetabling problem is therefore identical
++ to that of an all-to-all communications problem. In our case, there
++ is one wrinkle: as part of the schedule, the process must do
++ some data transfer with itself (local data movement), analogous
++ to a requirement that each team "play itself" in addition to other
++ teams. With this wrinkle, it turns out that an optimal timetable
++ (N parallel games) can be constructed for any N, not just for even
++ N as in the original problem described by Schreuder.
++*/
++static void fill1_comm_sched(int *sched, int which_pe, int npes)
++{
++ int pe, i, n, s = 0;
++ A(which_pe >= 0 && which_pe < npes);
++ if (npes % 2 == 0) {
++ n = npes;
++ sched[s++] = which_pe;
++ }
++ else
++ n = npes + 1;
++ for (pe = 0; pe < n - 1; ++pe) {
++ if (npes % 2 == 0) {
++ if (pe == which_pe) sched[s++] = npes - 1;
++ else if (npes - 1 == which_pe) sched[s++] = pe;
++ }
++ else if (pe == which_pe) sched[s++] = pe;
++
++ if (pe != which_pe && which_pe < n - 1) {
++ i = (pe - which_pe + (n - 1)) % (n - 1);
++ if (i < n/2)
++ sched[s++] = (pe + i) % (n - 1);
++
++ i = (which_pe - pe + (n - 1)) % (n - 1);
++ if (i < n/2)
++ sched[s++] = (pe - i + (n - 1)) % (n - 1);
++ }
++ }
++ A(s == npes);
++}
++
++/* Sort the communication schedule sched for npes so that the schedule
++ on process sortpe is ascending or descending (!ascending). This is
++ necessary to allow in-place transposes when the problem does not
++ divide equally among the processes. In this case there is one
++ process where the incoming blocks are bigger/smaller than the
++ outgoing blocks and thus have to be received in
++ descending/ascending order, respectively, to avoid overwriting data
++ before it is sent. */
++static void sort1_comm_sched(int *sched, int npes, int sortpe, int ascending)
++{
++ int *sortsched, i;
++ sortsched = (int *) MALLOC(npes * sizeof(int) * 2, OTHER);
++ fill1_comm_sched(sortsched, sortpe, npes);
++ if (ascending)
++ for (i = 0; i < npes; ++i)
++ sortsched[npes + sortsched[i]] = sched[i];
++ else
++ for (i = 0; i < npes; ++i)
++ sortsched[2*npes - 1 - sortsched[i]] = sched[i];
++ for (i = 0; i < npes; ++i)
++ sched[i] = sortsched[npes + i];
++ X(ifree)(sortsched);
++}
++
++/* make the plans to do the pre-MPI transpositions (shared with
++ transpose-alltoall-transposed) */
++int XM(mkplans_pretranspose)(const problem_mpi_transpose *p, planner *plnr,
++ R *I, R *O, int my_pe,
++ plan **cld2, plan **cld2rest, plan **cld3,
++ INT *rest_Ioff, INT *rest_Ooff)
++{
++ INT vn = p->vn;
++ INT b = XM(block)(p->nx, p->block, my_pe);
++ INT bt = p->tblock;
++ INT nyb = p->ny / bt; /* number of equal-sized blocks */
++ INT nyr = p->ny - nyb * bt; /* leftover rows after equal blocks */
++
++ *cld2 = *cld2rest = *cld3 = NULL;
++ *rest_Ioff = *rest_Ooff = 0;
++
++ if (!(p->flags & TRANSPOSED_IN) && (nyr == 0 || I != O)) {
++ INT ny = p->ny * vn;
++ bt *= vn;
++ *cld2 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_3d)
++ (nyb, bt, b * bt,
++ b, ny, bt,
++ bt, 1, 1),
++ I, O),
++ 0, 0, NO_SLOW);
++ if (!*cld2) goto nada;
++
++ if (nyr > 0) {
++ *rest_Ioff = nyb * bt;
++ *rest_Ooff = nyb * b * bt;
++ bt = nyr * vn;
++ *cld2rest = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_2d)
++ (b, ny, bt,
++ bt, 1, 1),
++ I + *rest_Ioff,
++ O + *rest_Ooff),
++ 0, 0, NO_SLOW);
++ if (!*cld2rest) goto nada;
++ }
++ }
++ else {
++ *cld2 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(
++ X(mktensor_4d)
++ (nyb, b * bt * vn, b * bt * vn,
++ b, vn, bt * vn,
++ bt, b * vn, vn,
++ vn, 1, 1),
++ I, O),
++ 0, 0, NO_SLOW);
++ if (!*cld2) goto nada;
++
++ *rest_Ioff = *rest_Ooff = nyb * bt * b * vn;
++ *cld2rest = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(
++ X(mktensor_3d)
++ (b, vn, nyr * vn,
++ nyr, b * vn, vn,
++ vn, 1, 1),
++ I + *rest_Ioff, O + *rest_Ooff),
++ 0, 0, NO_SLOW);
++ if (!*cld2rest) goto nada;
++
++ if (!(p->flags & TRANSPOSED_IN)) {
++ *cld3 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(
++ X(mktensor_3d)
++ (p->ny, vn, b * vn,
++ b, p->ny * vn, vn,
++ vn, 1, 1),
++ I, I),
++ 0, 0, NO_SLOW);
++ if (!*cld3) goto nada;
++ }
++ }
++
++ return 1;
++
++nada:
++ X(plan_destroy_internal)(*cld3);
++ X(plan_destroy_internal)(*cld2rest);
++ X(plan_destroy_internal)(*cld2);
++ *cld2 = *cld2rest = *cld3 = NULL;
++ return 0;
++}
++
++static plan *mkplan(const solver *ego_, const problem *p_, planner *plnr)
++{
++ const S *ego = (const S *) ego_;
++ const problem_mpi_transpose *p;
++ P *pln;
++ plan *cld1 = 0, *cld2 = 0, *cld2rest = 0, *cld3 = 0;
++ INT b, bt, vn, rest_Ioff, rest_Ooff;
++ INT *sbs, *sbo, *rbs, *rbo;
++ int pe, my_pe, n_pes, sort_pe = -1, ascending = 1;
++ R *I, *O;
++ static const plan_adt padt = {
++ XM(transpose_solve), awake, print, destroy
++ };
++
++ UNUSED(ego);
++
++ if (!applicable(ego, p_, plnr))
++ return (plan *) 0;
++
++ p = (const problem_mpi_transpose *) p_;
++ vn = p->vn;
++ I = p->I; O = p->O;
++
++ MPI_Comm_rank(p->comm, &my_pe);
++ MPI_Comm_size(p->comm, &n_pes);
++
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++
++
++ if (ego->preserve_input || NO_DESTROY_INPUTP(plnr)) I = p->O;
++
++ if (!(p->flags & TRANSPOSED_OUT)) { /* nx x bt x vn -> bt x nx x vn */
++ cld1 = X(mkplan_f_d)(plnr,
++ X(mkproblem_rdft_0_d)(X(mktensor_3d)
++ (bt, vn, p->nx * vn,
++ p->nx, bt * vn, vn,
++ vn, 1, 1),
++ I, O = p->O),
++ 0, 0, NO_SLOW);
++ if (XM(any_true)(!cld1, p->comm)) goto nada;
++
++ }
++ else {
++ if (ego->preserve_input || NO_DESTROY_INPUTP(plnr))
++ O = p->O;
++ else
++ O = p->I;
++ }
++
++ if (XM(any_true)(!XM(mkplans_pretranspose)(p, plnr, p->I, O, my_pe,
++ &cld2, &cld2rest, &cld3,
++ &rest_Ioff, &rest_Ooff),
++ p->comm)) goto nada;
++
++ pln = MKPLAN_MPI_TRANSPOSE(P, &padt, apply);
++
++ pln->cld1 = cld1;
++ pln->cld2 = cld2;
++ pln->cld2rest = cld2rest;
++ pln->rest_Ioff = rest_Ioff;
++ pln->rest_Ooff = rest_Ooff;
++ pln->cld3 = cld3;
++ pln->preserve_input = ego->preserve_input ? 2 : NO_DESTROY_INPUTP(plnr);
++
++ MPI_Comm_dup(p->comm, &pln->comm);
++
++ n_pes = (int) X(imax)(XM(num_blocks)(p->nx, p->block),
++ XM(num_blocks)(p->ny, p->tblock));
++
++ /* Compute sizes/offsets of blocks to exchange between processors */
++ sbs = (INT *) MALLOC(4 * n_pes * sizeof(INT), PLANS);
++ sbo = sbs + n_pes;
++ rbs = sbo + n_pes;
++ rbo = rbs + n_pes;
++ b = XM(block)(p->nx, p->block, my_pe);
++ bt = XM(block)(p->ny, p->tblock, my_pe);
++ for (pe = 0; pe < n_pes; ++pe) {
++ INT db, dbt; /* destination block sizes */
++ db = XM(block)(p->nx, p->block, pe);
++ dbt = XM(block)(p->ny, p->tblock, pe);
++
++ sbs[pe] = b * dbt * vn;
++ sbo[pe] = pe * (b * p->tblock) * vn;
++ rbs[pe] = db * bt * vn;
++ rbo[pe] = pe * (p->block * bt) * vn;
++
++ if (db * dbt > 0 && db * p->tblock != p->block * dbt) {
++ A(sort_pe == -1); /* only one process should need sorting */
++ sort_pe = pe;
++ ascending = db * p->tblock > p->block * dbt;
++ }
++ }
++ pln->n_pes = n_pes;
++ pln->my_pe = my_pe;
++ pln->send_block_sizes = sbs;
++ pln->send_block_offsets = sbo;
++ pln->recv_block_sizes = rbs;
++ pln->recv_block_offsets = rbo;
++
++ if (my_pe >= n_pes) {
++ pln->sched = 0; /* this process is not doing anything */
++ }
++ else {
++ pln->sched = (int *) MALLOC(n_pes * sizeof(int), PLANS);
++ fill1_comm_sched(pln->sched, my_pe, n_pes);
++ if (sort_pe >= 0)
++ sort1_comm_sched(pln->sched, n_pes, sort_pe, ascending);
++ }
++
++ X(ops_zero)(&pln->super.super.ops);
++ if (cld1) X(ops_add2)(&cld1->ops, &pln->super.super.ops);
++ if (cld2) X(ops_add2)(&cld2->ops, &pln->super.super.ops);
++ if (cld2rest) X(ops_add2)(&cld2rest->ops, &pln->super.super.ops);
++ if (cld3) X(ops_add2)(&cld3->ops, &pln->super.super.ops);
++ /* FIXME: should MPI operations be counted in "other" somehow? */
++
++ return &(pln->super.super);
++
++ nada:
++ X(plan_destroy_internal)(cld3);
++ X(plan_destroy_internal)(cld2rest);
++ X(plan_destroy_internal)(cld2);
++ X(plan_destroy_internal)(cld1);
++ return (plan *) 0;
++}
++
++static solver *mksolver(int preserve_input)
++{
++ static const solver_adt sadt = { PROBLEM_MPI_TRANSPOSE, mkplan, 0 };
++ S *slv = MKSOLVER(S, &sadt);
++ slv->preserve_input = preserve_input;
++ return &(slv->super);
++}
++
++void XM(transpose_pairwise_transposed_register)(planner *p)
++{
++ int preserve_input;
++ for (preserve_input = 0; preserve_input <= 1; ++preserve_input)
++ REGISTER_SOLVER(p, mksolver(preserve_input));
++}
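Both new solvers size their exchanges the same way, so here is a small, self-contained sketch (illustrative only, not part of the patch) of how the per-peer send/receive counts and offsets come about, and when the all-to-all variant can use plain MPI_Alltoall instead of MPI_Alltoallv. The block() helper is an assumption approximating FFTW's XM(block) (the number of rows of an n-row dimension, distributed in chunks of b, that process pe owns); the real code additionally builds the local pre- and post-transpose sub-plans, which are omitted here.

def block(n, b, pe):
    # Assumption: approximates XM(block): rows of an n-row dimension,
    # distributed in chunks of b, owned by process pe.
    return min(b, max(n - pe * b, 0))


def exchange_layout(nx, ny, xblock, yblock, vn, my_pe, n_pes):
    b = block(nx, xblock, my_pe)        # my rows before the transpose
    bt = block(ny, yblock, my_pe)       # my rows after the transpose
    send, recv = [], []
    equal_blocks = True
    sort_pe = None                      # the single peer with mismatched blocks, if any
    for pe in range(n_pes):
        db = block(nx, xblock, pe)      # destination block sizes
        dbt = block(ny, yblock, pe)
        if db != xblock or dbt != yblock:
            equal_blocks = False
        if db * dbt > 0 and db * yblock != xblock * dbt:
            sort_pe = pe                # the pairwise solver sorts its schedule around this peer
        send.append((b * dbt * vn, pe * b * yblock * vn))    # (count, offset) in elements
        recv.append((db * bt * vn, pe * xblock * bt * vn))
    return send, recv, equal_blocks, sort_pe


# Equal blocks mean identical counts, so plain MPI_Alltoall suffices; otherwise
# the per-peer counts/offsets feed MPI_Alltoallv or the scheduled Sendrecv loop.
print(exchange_layout(nx=10, ny=7, xblock=3, yblock=2, vn=1, my_pe=0, n_pes=4))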
diff --git a/var/spack/repos/builtin/packages/fish/package.py b/var/spack/repos/builtin/packages/fish/package.py
index 9d23a3d63d..fb784b7571 100644
--- a/var/spack/repos/builtin/packages/fish/package.py
+++ b/var/spack/repos/builtin/packages/fish/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Fish(Package):
"""fish is a smart and user-friendly command line shell for OS X, Linux, and
the rest of the family.
@@ -31,8 +32,7 @@ class Fish(Package):
homepage = "http://fishshell.com/"
url = "http://fishshell.com/files/2.2.0/fish-2.2.0.tar.gz"
- list_url = "http://fishshell.com/files/"
- list_depth = 2
+ list_url = "http://fishshell.com/"
version('2.2.0', 'a76339fd14ce2ec229283c53e805faac48c3e99d9e3ede9d82c0554acfc7b77a')
diff --git a/var/spack/repos/builtin/packages/fixesproto/package.py b/var/spack/repos/builtin/packages/fixesproto/package.py
new file mode 100644
index 0000000000..64852b40e2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fixesproto/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fixesproto(Package):
+ """X Fixes Extension.
+
+ The extension makes changes to many areas of the protocol to resolve
+ issues raised by application interaction with core protocol mechanisms
+ that cannot be adequately worked around on the client side of the wire."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/fixesproto"
+ url = "https://www.x.org/archive/individual/proto/fixesproto-5.0.tar.gz"
+
+ version('5.0', '1b3115574cadd4cbea1f197faa7c1de4')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/flex/package.py b/var/spack/repos/builtin/packages/flex/package.py
index b778538606..23b3c8b21a 100644
--- a/var/spack/repos/builtin/packages/flex/package.py
+++ b/var/spack/repos/builtin/packages/flex/package.py
@@ -25,19 +25,46 @@
from spack import *
-class Flex(Package):
+class Flex(AutotoolsPackage):
"""Flex is a tool for generating scanners."""
- homepage = "http://flex.sourceforge.net/"
- url = "http://download.sourceforge.net/flex/flex-2.5.39.tar.gz"
+ homepage = "https://github.com/westes/flex"
+ url = "https://github.com/westes/flex/releases/download/v2.6.1/flex-2.6.1.tar.gz"
- version('2.6.0', '5724bcffed4ebe39e9b55a9be80859ec')
- version('2.5.39', 'e133e9ead8ec0a58d81166b461244fde')
+ # Problematic version:
+ # See issue #2554; https://github.com/westes/flex/issues/113
+ # version('2.6.2', 'cc6d76c333db7653d5caf423a3335239')
+ version('2.6.1', '05bcd8fb629e0ae130311e8a6106fa82')
+ version('2.6.0', '760be2ee9433e822b6eb65318311c19d')
+ version('2.5.39', '5865e76ac69c05699f476515592750d7')
- depends_on("bison")
+ depends_on('bison', type='build')
+ depends_on('gettext@0.19:', type='build')
+ depends_on('help2man', type='build')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ # Older tarballs don't come with a configure script
+ depends_on('m4', type='build', when='@:2.6.0')
+ depends_on('autoconf', type='build', when='@:2.6.0')
+ depends_on('automake', type='build', when='@:2.6.0')
+ depends_on('libtool', type='build', when='@:2.6.0')
- make()
- make("install")
+ def url_for_version(self, version):
+ url = "https://github.com/westes/flex"
+ if version >= Version('2.6.1'):
+ url += "/releases/download/v{0}/flex-{0}.tar.gz".format(version)
+ elif version == Version('2.6.0'):
+ url += "/archive/v{0}.tar.gz".format(version)
+ elif version >= Version('2.5.37'):
+ url += "/archive/flex-{0}.tar.gz".format(version)
+ else:
+ url += "/archive/flex-{0}.tar.gz".format(version.dashed)
+
+ return url
+
+ def autoreconf(self, spec, prefix):
+ pass
+
+ @when('@:2.6.0')
+ def autoreconf(self, spec, prefix):
+ libtoolize('--install', '--force')
+ autoreconf('--install', '--force')
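Since the new url_for_version switches URL schemes at specific flex versions, here is a plain-Python trace of those rules (illustrative only, outside of Spack): flex_url is a hypothetical stand-in for the method above, and Spack's Version.dashed is approximated with a simple dot-to-dash replacement.

def flex_url(version):
    base = "https://github.com/westes/flex"
    v = tuple(int(x) for x in version.split('.'))
    if v >= (2, 6, 1):
        return base + "/releases/download/v{0}/flex-{0}.tar.gz".format(version)
    elif v == (2, 6, 0):
        return base + "/archive/v{0}.tar.gz".format(version)
    elif v >= (2, 5, 37):
        return base + "/archive/flex-{0}.tar.gz".format(version)
    else:
        return base + "/archive/flex-{0}.tar.gz".format(version.replace('.', '-'))


for v in ('2.6.1', '2.6.0', '2.5.39', '2.5.4'):
    print(v, '->', flex_url(v))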
diff --git a/var/spack/repos/builtin/packages/flint/package.py b/var/spack/repos/builtin/packages/flint/package.py
new file mode 100644
index 0000000000..c39b17db2c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/flint/package.py
@@ -0,0 +1,66 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Flint(Package):
+ """FLINT (Fast Library for Number Theory)."""
+
+ homepage = "http://www.flintlib.org"
+ url = "http://mirrors.mit.edu/sage/spkg/upstream/flint/flint-2.5.2.tar.gz"
+
+ version('2.5.2', 'cda885309362150196aed66a5e0f0383')
+ version('2.4.5', '6504b9deabeafb9313e57153a1730b33')
+ version('develop', git='https://github.com/wbhart/flint2.git')
+
+ # Overlap in functionality between gmp and mpir
+ # All other dependencies must also be built with
+ # one or the other
+ # variant('mpir', default=False,
+ # description='Compile with the MPIR library')
+
+ # Build dependencies
+ depends_on('autoconf', type='build')
+
+ # Other dependencies
+ depends_on('gmp') # mpir is a drop-in replacement for this
+ depends_on('mpfr') # Could also be built against mpir
+
+ def install(self, spec, prefix):
+ options = []
+ options = ["--prefix=%s" % prefix,
+ "--with-gmp=%s" % spec['gmp'].prefix,
+ "--with-mpfr=%s" % spec['mpfr'].prefix]
+
+ # if '+mpir' in spec:
+ # options.extend([
+ # "--with-mpir=%s" % spec['mpir'].prefix
+ # ])
+
+ configure(*options)
+ make()
+ if self.run_tests:
+ make("check")
+ make("install")
diff --git a/var/spack/repos/builtin/packages/fltk/package.py b/var/spack/repos/builtin/packages/fltk/package.py
index f8ac5bc2a4..f29b64b02b 100644
--- a/var/spack/repos/builtin/packages/fltk/package.py
+++ b/var/spack/repos/builtin/packages/fltk/package.py
@@ -26,13 +26,16 @@ from spack import *
class Fltk(Package):
- """
- FLTK (pronounced "fulltick") is a cross-platform C++ GUI toolkit for UNIX/Linux (X11), Microsoft Windows, and
- MacOS X. FLTK provides modern GUI functionality without the bloat and supports 3D graphics via OpenGL and its
- built-in GLUT emulation.
+ """FLTK (pronounced "fulltick") is a cross-platform C++ GUI toolkit for
+ UNIX/Linux (X11), Microsoft Windows, and MacOS X. FLTK provides
+ modern GUI functionality without the bloat and supports 3D
+ graphics via OpenGL and its built-in GLUT emulation.
+
+ FLTK is designed to be small and modular enough to be statically
+ linked, but works fine as a shared library. FLTK also includes an
+ excellent UI builder called FLUID that can be used to create
+ applications in minutes.
- FLTK is designed to be small and modular enough to be statically linked, but works fine as a shared library. FLTK
- also includes an excellent UI builder called FLUID that can be used to create applications in minutes.
"""
homepage = 'http://www.fltk.org/'
url = 'http://fltk.org/pub/fltk/1.3.3/fltk-1.3.3-source.tar.gz'
@@ -41,7 +44,8 @@ class Fltk(Package):
patch('font.patch', when='@1.3.3')
- variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
def install(self, spec, prefix):
options = ['--prefix=%s' % prefix,
diff --git a/var/spack/repos/builtin/packages/flux/package.py b/var/spack/repos/builtin/packages/flux/package.py
index 2b972d63ae..7abd9f46bd 100644
--- a/var/spack/repos/builtin/packages/flux/package.py
+++ b/var/spack/repos/builtin/packages/flux/package.py
@@ -25,13 +25,18 @@
from spack import *
import os
-class Flux(Package):
+
+class Flux(AutotoolsPackage):
""" A next-generation resource manager (pre-alpha) """
homepage = "https://github.com/flux-framework/flux-core"
- url = "https://github.com/flux-framework/flux-core"
+ url = "https://github.com/flux-framework/flux-core/releases/download/v0.6.0/flux-core-0.6.0.tar.gz"
+
+ version('0.6.0', md5='d44a0f719744771d168edd205bd8e74e')
+ version('master', branch='master',
+ git='https://github.com/flux-framework/flux-core')
- version('master', branch='master', git='https://github.com/flux-framework/flux-core')
+ variant('docs', default=True, description='Build flux manpages')
# Also needs autotools, but should use the system version if available
depends_on("zeromq@4.0.4:")
@@ -43,21 +48,20 @@ class Flux(Package):
depends_on("libxslt")
depends_on("python")
depends_on("py-cffi")
+ depends_on("jansson")
- # TODO: This provides a catalog, hacked with environment below for now
- depends_on("docbook-xml")
- depends_on("asciidoc")
+ depends_on("asciidoc", type='build', when="+docs")
- def install(self, spec, prefix):
- # Bootstrap with autotools
- bash = which('bash')
- bash('./autogen.sh')
- bash('./autogen.sh') #yes, twice, intentionally
+ depends_on("autoconf", type='build', when='@master')
+ depends_on("automake", type='build', when='@master')
+ depends_on("libtool", type='build', when='@master')
- # Fix asciidoc dependency on xml style sheets and whatnot
- os.environ['XML_CATALOG_FILES'] = os.path.join(spec['docbook-xml'].prefix,
- 'catalog.xml')
- # Configure, compile & install
- configure("--prefix=" + prefix)
- make("install", "V=1")
+ def autoreconf(self, spec, prefix):
+ if os.path.exists('autogen.sh'):
+ # Bootstrap with autotools
+ bash = which('bash')
+ bash('./autogen.sh')
+ bash('./autogen.sh') # yes, twice, intentionally
+ def configure_args(self):
+ return ['--disable-docs'] if '+docs' not in self.spec else []
diff --git a/var/spack/repos/builtin/packages/foam-extend/package.py b/var/spack/repos/builtin/packages/foam-extend/package.py
new file mode 100644
index 0000000000..67397a5b72
--- /dev/null
+++ b/var/spack/repos/builtin/packages/foam-extend/package.py
@@ -0,0 +1,289 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from spack.environment import *
+
+import multiprocessing
+import os
+
+
+class FoamExtend(Package):
+ """The foam-extend project is a fork of the OpenFOAM open source library
+ for Computational Fluid Dynamics (CFD)."""
+
+ homepage = "http://www.extend-project.de/"
+
+ version('3.2', git='http://git.code.sf.net/p/foam-extend/foam-extend-3.2')
+ version('3.1', git='http://git.code.sf.net/p/foam-extend/foam-extend-3.1')
+ version('3.0', git='http://git.code.sf.net/p/foam-extend/foam-extend-3.0')
+
+ variant('paraview', default=False, description='Enable ParaFOAM')
+ variant(
+ 'scotch', default=True,
+ description='Activate Scotch as a possible decomposition library')
+ variant(
+ 'ptscotch', default=True,
+ description='Activate PT-Scotch as a possible decomposition library')
+ variant(
+ 'metis', default=True,
+ description='Activate Metis as a possible decomposition library')
+ variant(
+ 'parmetis', default=True,
+ description='Activate Parmetis as a possible decomposition library')
+ variant(
+ 'parmgridgen', default=True,
+ description='Activate Parmgridgen support')
+ variant(
+ 'source', default=True,
+ description='Installs also the source folder')
+
+ supported_compilers = {'clang': 'Clang', 'gcc': 'Gcc', 'intel': 'Icc'}
+
+ depends_on('mpi')
+ depends_on('python')
+ depends_on('flex')
+ depends_on('zlib')
+ depends_on('cmake', type='build')
+
+ depends_on('scotch ~ metis', when='~ptscotch+scotch')
+ depends_on('scotch ~ metis + mpi', when='+ptscotch')
+ depends_on('metis@5:', when='+metis')
+ depends_on('parmetis', when='+parmetis')
+ depends_on('parmgridgen', when='+parmgridgen')
+
+ depends_on('paraview', when='+paraview')
+
+ def set_arch(self):
+ (sysname, nodename, release, version, machine) = os.uname()
+
+ if self.compiler.name not in self.supported_compilers:
+ raise RuntimeError('{0} is not a supported compiler \
+ to compile OpenFOAM'.format(self.compiler.name))
+
+ foam_compiler = self.supported_compilers[self.compiler.name]
+ if sysname == 'Linux':
+ arch = 'linux'
+ if foam_compiler == 'Clang':
+ raise RuntimeError('OS, compiler combination not\
+ supported ({0} {1})'.format(sysname, foam_compiler))
+ elif sysname == 'Darwin':
+ if machine == 'x86_64':
+ arch = 'darwinIntel'
+ if foam_compiler == 'Icc':
+ raise RuntimeError('OS, compiler combination not\
+ supported ({0} {1})'.format(sysname, foam_compiler))
+ else:
+ raise RuntimeError('{0} {1} is not a \
+ supported architecture'.format(sysname, machine))
+
+ return (arch, foam_compiler)
+
+ def get_openfoam_environment(self):
+ return EnvironmentModifications.from_sourcing_files(
+ join_path(self.stage.source_path,
+ 'etc/bashrc'))
+
+ def patch(self):
+ # change names to match the package and not the one patch in
+ # the Third-Party of foam-extend
+ if '+parmgridgen' in self.spec:
+ filter_file(r'-lMGridGen',
+ r'-lmgrid',
+ 'src/dbns/Make/options')
+
+ filter_file(
+ r'-lMGridGen',
+ r'-lmgrid',
+ 'src/fvAgglomerationMethods/MGridGenGamgAgglomeration/Make/options') # noqa: E501
+
+ # Get the wmake arch and compiler
+ (arch, foam_compiler) = self.set_arch()
+
+ prefs_dict = {
+ 'compilerInstall': 'System',
+ 'WM_COMPILER': foam_compiler,
+ 'WM_ARCH_OPTION': '64',
+ 'WM_PRECISION_OPTION': 'DP',
+ 'WM_COMPILE_OPTION': 'SPACKOpt',
+ 'WM_MPLIB': 'SPACK',
+
+ 'CMAKE_DIR': self.spec['cmake'].prefix,
+ 'CMAKE_BIN_DIR': self.spec['cmake'].prefix.bin,
+ 'PYTHON_DIR': self.spec['python'].prefix,
+ 'PYTHON_BIN_DIR': self.spec['python'].prefix.bin,
+
+ 'FLEX_SYSTEM': 1,
+ 'FLEX_DIR': self.spec['flex'].prefix,
+
+ 'BISON_SYSTEM': 1,
+ 'BISON_DIR': self.spec['flex'].prefix,
+
+ 'ZLIB_SYSTEM': 1,
+ 'ZLIB_DIR': self.spec['zlib'].prefix,
+ }
+
+ if '+scotch' in self.spec or '+ptscotch' in self.spec:
+ prefs_dict['SCOTCH_SYSTEM'] = 1
+ prefs_dict['SCOTCH_DIR'] = self.spec['scotch'].prefix
+ prefs_dict['SCOTCH_BIN_DIR'] = self.spec['scotch'].prefix.bin
+ prefs_dict['SCOTCH_LIB_DIR'] = self.spec['scotch'].prefix.lib
+ prefs_dict['SCOTCH_INCLUDE_DIR'] = \
+ self.spec['scotch'].prefix.include
+
+ if '+metis' in self.spec:
+ prefs_dict['METIS_SYSTEM'] = 1
+ prefs_dict['METIS_DIR'] = self.spec['metis'].prefix
+ prefs_dict['METIS_BIN_DIR'] = self.spec['metis'].prefix.bin
+ prefs_dict['METIS_LIB_DIR'] = self.spec['metis'].prefix.lib
+ prefs_dict['METIS_INCLUDE_DIR'] = self.spec['metis'].prefix.include
+
+ if '+parmetis' in self.spec:
+ prefs_dict['PARMETIS_SYSTEM'] = 1
+ prefs_dict['PARMETIS_DIR'] = self.spec['parmetis'].prefix
+ prefs_dict['PARMETIS_BIN_DIR'] = self.spec['parmetis'].prefix.bin
+ prefs_dict['PARMETIS_LIB_DIR'] = self.spec['parmetis'].prefix.lib
+ prefs_dict['PARMETIS_INCLUDE_DIR'] = \
+ self.spec['parmetis'].prefix.include
+
+ if '+parmgridgen' in self.spec:
+ prefs_dict['PARMGRIDGEN_SYSTEM'] = 1
+ prefs_dict['PARMGRIDGEN_DIR'] = self.spec['parmgridgen'].prefix
+ prefs_dict['PARMGRIDGEN_BIN_DIR'] = \
+ self.spec['parmgridgen'].prefix.bin
+ prefs_dict['PARMGRIDGEN_LIB_DIR'] = \
+ self.spec['parmgridgen'].prefix.lib
+ prefs_dict['PARMGRIDGEN_INCLUDE_DIR'] = \
+ self.spec['parmgridgen'].prefix.include
+
+ if '+paraview' in self.spec:
+ prefs_dict['PARAVIEW_SYSTEM'] = 1
+ prefs_dict['PARAVIEW_DIR'] = self.spec['paraview'].prefix
+ prefs_dict['PARAVIEW_BIN_DIR'] = self.spec['paraview'].prefix.bin
+ prefs_dict['QT_SYSTEM'] = 1
+ prefs_dict['QT_DIR'] = self.spec['qt'].prefix
+ prefs_dict['QT_BIN_DIR'] = self.spec['qt'].prefix.bin
+
+ # Write the prefs files that define the required configuration.
+ # Only prefs.sh is used by this script, but both are installed
+ # for end users
+ with working_dir('.'):
+ with open("etc/prefs.sh", "w") as fh:
+ for key in sorted(prefs_dict):
+ fh.write('export {0}={1}\n'.format(key, prefs_dict[key]))
+
+ with open("etc/prefs.csh", "w") as fh:
+ for key in sorted(prefs_dict):
+ fh.write('setenv {0} {1}\n'.format(key, prefs_dict[key]))
+
+ # Define separate MPI and optimisation rule files so that wmake
+ # picks up the Spack-provided settings with minimal changes to the
+ # stock configuration scripts
+ mpi_info = [
+ 'PFLAGS = -DOMPI_SKIP_MPICXX -DMPICH_IGNORE_CXX_SEEK',
+ 'PINC = -I{0}'.format(self.spec['mpi'].prefix.include),
+ 'PLIBS = -L{0} -lmpi'.format(self.spec['mpi'].prefix.lib)
+ ]
+
+ arch_path = ''.join([arch, prefs_dict['WM_ARCH_OPTION'],
+ foam_compiler])
+ option_path = ''.join([arch_path,
+ prefs_dict['WM_PRECISION_OPTION'],
+ prefs_dict['WM_COMPILE_OPTION']])
+ rule_path = join_path("wmake", "rules", arch_path)
+ build_path = join_path(self.stage.source_path, 'lib', option_path)
+ install_path = \
+ join_path(self.prefix,
+ 'foam-extend-{0}'.format(self.version.up_to(2)),
+ option_path)
+
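+ # RPATH both the build tree and the final install tree so binaries
+ # find their libraries during and after the build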
+ rpaths_foam = ' '.join([
+ '{0}{1}'.format(self.compiler.cxx_rpath_arg,
+ install_path),
+ '{0}{1}'.format(self.compiler.cxx_rpath_arg,
+ build_path)
+ ])
+
+ compiler_flags = {
+ 'DBUG': rpaths_foam,
+ 'OPT': '-O3',
+ }
+
+ with working_dir(rule_path):
+ with open('mplibSPACK', "w") as fh:
+ fh.write('\n'.join(mpi_info))
+
+ for comp in ['c', 'c++']:
+ with open('{0}SPACKOpt'.format(comp), "w") as fh:
+ for key, val in compiler_flags.items():
+ fh.write('{0}{1} = {2}\n'.format(comp, key, val))
+
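+ # Only trigger the old-flex workaround for flex older than 2.5.34;
+ # with flex 2.6+ the subminor version starts over, so the original
+ # check would match incorrectly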
+ _files_to_patch = [
+ 'src/thermophysicalModels/reactionThermo/chemistryReaders/chemkinReader/chemkinLexer.L', # noqa: E501
+ 'src/surfMesh/surfaceFormats/stl/STLsurfaceFormatASCII.L', # noqa: E501
+ 'src/meshTools/triSurface/triSurface/interfaces/STL/readSTLASCII.L', # noqa: E501
+ 'applications/utilities/preProcessing/fluentDataToFoam/fluentDataToFoam.L', # noqa: E501
+ 'applications/utilities/mesh/conversion/gambitToFoam/gambitToFoam.L', # noqa: E501
+ 'applications/utilities/mesh/conversion/fluent3DMeshToFoam/fluent3DMeshToFoam.L', # noqa: E501
+ 'applications/utilities/mesh/conversion/ansysToFoam/ansysToFoam.L', # noqa: E501
+ 'applications/utilities/mesh/conversion/fluentMeshToFoam/fluentMeshToFoam.L', # noqa: E501
+ 'applications/utilities/mesh/conversion/fluent3DMeshToElmer/fluent3DMeshToElmer.L' # noqa: E501
+ ]
+ for _file in _files_to_patch:
+ filter_file(r'#if YY_FLEX_SUBMINOR_VERSION < 34',
+ r'#if YY_FLEX_MAJOR_VERSION <= 2 && YY_FLEX_MINOR_VERSION <= 5 && YY_FLEX_SUBMINOR_VERSION < 34', # noqa: E501
+ _file)
+
+ def setup_environment(self, spack_env, run_env):
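+ # FOAM_INST_DIR points at the build stage while building and at the
+ # install prefix for users of the installed package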
+ with working_dir(self.stage.path):
+ spack_env.set('FOAM_INST_DIR', os.path.abspath('.'))
+
+ (arch, foam_compiler) = self.set_arch()
+
+ run_env.set('FOAM_INST_DIR', self.prefix)
+
+ def install(self, spec, prefix):
+ env_openfoam = self.get_openfoam_environment()
+ env_openfoam.apply_modifications()
+
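+ # Allwmake reads WM_NCOMPPROCS to decide how many parallel compile
+ # jobs to run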
+ if self.parallel:
+ os.environ['WM_NCOMPPROCS'] = str(self.make_jobs) \
+ if self.make_jobs else str(multiprocessing.cpu_count())
+
+ allwmake = Executable('./Allwmake')
+ allwmake()
+
+ install_path = \
+ join_path(self.prefix,
+ 'foam-extend-{0}'.format(self.version.up_to(2)))
+
+ if '+source' in spec:
+ install_tree('src', join_path(install_path, 'src'))
+ install_tree('tutorials', join_path(install_path, 'tutorials'))
+
+ install_tree('lib', join_path(install_path, 'lib'))
+ install_tree('bin', join_path(install_path, 'bin'))
+ install_tree('applications', join_path(install_path, 'applications'))
+ install_tree('etc', join_path(install_path, 'etc'))
+ install_tree('wmake', join_path(install_path, 'wmake'))
diff --git a/var/spack/repos/builtin/packages/font-adobe-100dpi/package.py b/var/spack/repos/builtin/packages/font-adobe-100dpi/package.py
new file mode 100644
index 0000000000..bde6f352da
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-adobe-100dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontAdobe100dpi(Package):
+ """X.org adobe-100dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/adobe-100dpi"
+ url = "https://www.x.org/archive/individual/font/font-adobe-100dpi-1.0.3.tar.gz"
+
+ version('1.0.3', 'ba61e7953f4f5cec5a8e69c262bbc7f9')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-adobe-75dpi/package.py b/var/spack/repos/builtin/packages/font-adobe-75dpi/package.py
new file mode 100644
index 0000000000..380fcf363e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-adobe-75dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontAdobe75dpi(Package):
+ """X.org adobe-75dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/adobe-75dpi"
+ url = "https://www.x.org/archive/individual/font/font-adobe-75dpi-1.0.3.tar.gz"
+
+ version('1.0.3', '7a414bb661949cec938938fd678cf649')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-adobe-utopia-100dpi/package.py b/var/spack/repos/builtin/packages/font-adobe-utopia-100dpi/package.py
new file mode 100644
index 0000000000..9782d259b5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-adobe-utopia-100dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontAdobeUtopia100dpi(Package):
+ """X.org adobe-utopia-100dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/adobe-utopia-100dpi"
+ url = "https://www.x.org/archive/individual/font/font-adobe-utopia-100dpi-1.0.4.tar.gz"
+
+ version('1.0.4', '128416eccd59b850f77a9b803681da3c')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-adobe-utopia-75dpi/package.py b/var/spack/repos/builtin/packages/font-adobe-utopia-75dpi/package.py
new file mode 100644
index 0000000000..9b687a7814
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-adobe-utopia-75dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontAdobeUtopia75dpi(Package):
+ """X.org adobe-utopia-75dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/adobe-utopia-75dpi"
+ url = "https://www.x.org/archive/individual/font/font-adobe-utopia-75dpi-1.0.4.tar.gz"
+
+ version('1.0.4', '74c73a5b73c6c3224b299f1fc033e508')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-adobe-utopia-type1/package.py b/var/spack/repos/builtin/packages/font-adobe-utopia-type1/package.py
new file mode 100644
index 0000000000..14004e9883
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-adobe-utopia-type1/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontAdobeUtopiaType1(Package):
+ """X.org adobe-utopia-type1 font."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/font/adobe-utopia-type1"
+ url = "https://www.x.org/archive/individual/font/font-adobe-utopia-type1-1.0.4.tar.gz"
+
+ version('1.0.4', 'b0676c3495acabad519ee98a94163904')
+
+ depends_on('font-util', type='build')
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-alias/package.py b/var/spack/repos/builtin/packages/font-alias/package.py
new file mode 100644
index 0000000000..eb8c79fe2a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-alias/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontAlias(Package):
+ """X.org alias font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/alias"
+ url = "https://www.x.org/archive/individual/font/font-alias-1.0.3.tar.gz"
+
+ version('1.0.3', '535138efe0a95f5fe521be6a6b9c4888')
+
+ depends_on('font-util')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-arabic-misc/package.py b/var/spack/repos/builtin/packages/font-arabic-misc/package.py
new file mode 100644
index 0000000000..8307d58d6e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-arabic-misc/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontArabicMisc(Package):
+ """X.org arabic-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/arabic-misc"
+ url = "https://www.x.org/archive/individual/font/font-arabic-misc-1.0.3.tar.gz"
+
+ version('1.0.3', '918457df65ef93f09969c6ab01071789')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bh-100dpi/package.py b/var/spack/repos/builtin/packages/font-bh-100dpi/package.py
new file mode 100644
index 0000000000..1d488a6cd9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bh-100dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBh100dpi(Package):
+ """X.org bh-100dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bh-100dpi"
+ url = "https://www.x.org/archive/individual/font/font-bh-100dpi-1.0.3.tar.gz"
+
+ version('1.0.3', '09e63a5608000531179e1ab068a35878')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bh-75dpi/package.py b/var/spack/repos/builtin/packages/font-bh-75dpi/package.py
new file mode 100644
index 0000000000..22420dd887
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bh-75dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBh75dpi(Package):
+ """X.org bh-75dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bh-75dpi"
+ url = "https://www.x.org/archive/individual/font/font-bh-75dpi-1.0.3.tar.gz"
+
+ version('1.0.3', '88fec4ebc4a265684bff3abdd066f14f')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bh-lucidatypewriter-100dpi/package.py b/var/spack/repos/builtin/packages/font-bh-lucidatypewriter-100dpi/package.py
new file mode 100644
index 0000000000..173195a557
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bh-lucidatypewriter-100dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBhLucidatypewriter100dpi(Package):
+ """X.org bh-lucidatypewriter-100dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bh-lucidatypewriter-100dpi"
+ url = "https://www.x.org/archive/individual/font/font-bh-lucidatypewriter-100dpi-1.0.3.tar.gz"
+
+ version('1.0.3', '5f716f54e497fb4ec1bb3a5d650ac6f7')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bh-lucidatypewriter-75dpi/package.py b/var/spack/repos/builtin/packages/font-bh-lucidatypewriter-75dpi/package.py
new file mode 100644
index 0000000000..9066823bc3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bh-lucidatypewriter-75dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBhLucidatypewriter75dpi(Package):
+ """X.org bh-lucidatypewriter-75dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bh-lucidatypewriter-75dpi"
+ url = "https://www.x.org/archive/individual/font/font-bh-lucidatypewriter-75dpi-1.0.3.tar.gz"
+
+ version('1.0.3', 'cab8a44ae329aab7141c7adeef0daf5a')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bh-ttf/package.py b/var/spack/repos/builtin/packages/font-bh-ttf/package.py
new file mode 100644
index 0000000000..a10b88d355
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bh-ttf/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBhTtf(Package):
+ """X.org bh-ttf font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bh-ttf"
+ url = "https://www.x.org/archive/individual/font/font-bh-ttf-1.0.3.tar.gz"
+
+ version('1.0.3', '4ce741ec4edaa11cd38988d355a7578b')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bh-type1/package.py b/var/spack/repos/builtin/packages/font-bh-type1/package.py
new file mode 100644
index 0000000000..fffc2e4095
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bh-type1/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBhType1(Package):
+ """X.org bh-type1 font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bh-type1"
+ url = "https://www.x.org/archive/individual/font/font-bh-type1-1.0.3.tar.gz"
+
+ version('1.0.3', '62d4e8f782a6a0658784072a5df5ac98')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bitstream-100dpi/package.py b/var/spack/repos/builtin/packages/font-bitstream-100dpi/package.py
new file mode 100644
index 0000000000..e8e11ae627
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bitstream-100dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBitstream100dpi(Package):
+ """X.org bitstream-100dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bitstream-100dpi"
+ url = "https://www.x.org/archive/individual/font/font-bitstream-100dpi-1.0.3.tar.gz"
+
+ version('1.0.3', 'c27bf37e9b8039f93bd90b8131ed37ad')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bitstream-75dpi/package.py b/var/spack/repos/builtin/packages/font-bitstream-75dpi/package.py
new file mode 100644
index 0000000000..5dd033964b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bitstream-75dpi/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBitstream75dpi(Package):
+ """X.org bitstream-75dpi font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bitstream-75dpi"
+ url = "https://www.x.org/archive/individual/font/font-bitstream-75dpi-1.0.3.tar.gz"
+
+ version('1.0.3', '4ff6c5d6aebe69371e27b09ad8313d25')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bitstream-speedo/package.py b/var/spack/repos/builtin/packages/font-bitstream-speedo/package.py
new file mode 100644
index 0000000000..e746f241df
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bitstream-speedo/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBitstreamSpeedo(Package):
+ """X.org bitstream-speedo font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bitstream-speedo"
+ url = "https://www.x.org/archive/individual/font/font-bitstream-speedo-1.0.2.tar.gz"
+
+ version('1.0.2', 'f0a777b351cf5adefefcf4823e0c1c01')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-bitstream-type1/package.py b/var/spack/repos/builtin/packages/font-bitstream-type1/package.py
new file mode 100644
index 0000000000..65289685c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-bitstream-type1/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontBitstreamType1(Package):
+ """X.org bitstream-type1 font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/bitstream-type1"
+ url = "https://www.x.org/archive/individual/font/font-bitstream-type1-1.0.3.tar.gz"
+
+ version('1.0.3', 'ff91738c4d3646d7999e00aa9923f2a0')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-cronyx-cyrillic/package.py b/var/spack/repos/builtin/packages/font-cronyx-cyrillic/package.py
new file mode 100644
index 0000000000..07e1330fe6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-cronyx-cyrillic/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontCronyxCyrillic(Package):
+ """X.org cronyx-cyrillic font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/cronyx-cyrillic"
+ url = "https://www.x.org/archive/individual/font/font-cronyx-cyrillic-1.0.3.tar.gz"
+
+ version('1.0.3', '3119ba1bc7f775c162c96e17a912fe30')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-cursor-misc/package.py b/var/spack/repos/builtin/packages/font-cursor-misc/package.py
new file mode 100644
index 0000000000..6fddc015e3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-cursor-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontCursorMisc(Package):
+ """X.org cursor-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/cursor-misc"
+ url = "https://www.x.org/archive/individual/font/font-cursor-misc-1.0.3.tar.gz"
+
+ version('1.0.3', 'a0bf70c7e498f1cd8e3fdf6154f2bb00')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-daewoo-misc/package.py b/var/spack/repos/builtin/packages/font-daewoo-misc/package.py
new file mode 100644
index 0000000000..3dd3b59b14
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-daewoo-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontDaewooMisc(Package):
+ """X.org daewoo-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/daewoo-misc"
+ url = "https://www.x.org/archive/individual/font/font-daewoo-misc-1.0.3.tar.gz"
+
+ version('1.0.3', '71a7e2796f045c9d217a19c4e6c25bc1')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-dec-misc/package.py b/var/spack/repos/builtin/packages/font-dec-misc/package.py
new file mode 100644
index 0000000000..035ae3eb15
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-dec-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontDecMisc(Package):
+ """X.org dec-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/dec-misc"
+ url = "https://www.x.org/archive/individual/font/font-dec-misc-1.0.3.tar.gz"
+
+ version('1.0.3', '5a9242f6b60ecf2b8c5b158322ca2a40')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-ibm-type1/package.py b/var/spack/repos/builtin/packages/font-ibm-type1/package.py
new file mode 100644
index 0000000000..34bbe85cfb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-ibm-type1/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontIbmType1(Package):
+ """X.org ibm-type1 font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/ibm-type1"
+ url = "https://www.x.org/archive/individual/font/font-ibm-type1-1.0.3.tar.gz"
+
+ version('1.0.3', '2806116e4adcb89d3d5ff5faf65e57c1')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-isas-misc/package.py b/var/spack/repos/builtin/packages/font-isas-misc/package.py
new file mode 100644
index 0000000000..b0575f8ffc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-isas-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontIsasMisc(Package):
+ """X.org isas-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/isas-misc"
+ url = "https://www.x.org/archive/individual/font/font-isas-misc-1.0.3.tar.gz"
+
+ version('1.0.3', 'ecc3b6fbe8f5721ddf5c7fc66f73e76f')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-jis-misc/package.py b/var/spack/repos/builtin/packages/font-jis-misc/package.py
new file mode 100644
index 0000000000..a5bee3fe31
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-jis-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontJisMisc(Package):
+ """X.org jis-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/jis-misc"
+ url = "https://www.x.org/archive/individual/font/font-jis-misc-1.0.3.tar.gz"
+
+ version('1.0.3', 'c48ee5749ae25075d2c7a6111c195e7b')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-micro-misc/package.py b/var/spack/repos/builtin/packages/font-micro-misc/package.py
new file mode 100644
index 0000000000..930a299beb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-micro-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontMicroMisc(Package):
+ """X.org micro-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/micro-misc"
+ url = "https://www.x.org/archive/individual/font/font-micro-misc-1.0.3.tar.gz"
+
+ version('1.0.3', '4de3f0ce500aef85f198c52ace5e66ac')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-misc-cyrillic/package.py b/var/spack/repos/builtin/packages/font-misc-cyrillic/package.py
new file mode 100644
index 0000000000..4d25552732
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-misc-cyrillic/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontMiscCyrillic(Package):
+ """X.org misc-cyrillic font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/misc-cyrillic"
+ url = "https://www.x.org/archive/individual/font/font-misc-cyrillic-1.0.3.tar.gz"
+
+ version('1.0.3', 'e7b13da5325f62dd3f630beade6d2656')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-misc-ethiopic/package.py b/var/spack/repos/builtin/packages/font-misc-ethiopic/package.py
new file mode 100644
index 0000000000..6ccdc4e482
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-misc-ethiopic/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontMiscEthiopic(Package):
+ """X.org misc-ethiopic font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/misc-ethiopic"
+ url = "https://www.x.org/archive/individual/font/font-misc-ethiopic-1.0.3.tar.gz"
+
+ version('1.0.3', '02ddea9338d9d36804ad38f3daadb55a')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-misc-meltho/package.py b/var/spack/repos/builtin/packages/font-misc-meltho/package.py
new file mode 100644
index 0000000000..eda84e2b32
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-misc-meltho/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontMiscMeltho(Package):
+ """X.org misc-meltho font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/misc-meltho"
+ url = "https://www.x.org/archive/individual/font/font-misc-meltho-1.0.3.tar.gz"
+
+ version('1.0.3', '8380696483478449c39b04612f20eea8')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-misc-misc/package.py b/var/spack/repos/builtin/packages/font-misc-misc/package.py
new file mode 100644
index 0000000000..c960d18b39
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-misc-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontMiscMisc(Package):
+ """X.org misc-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/misc-misc"
+ url = "https://www.x.org/archive/individual/font/font-misc-misc-1.1.2.tar.gz"
+
+ version('1.1.2', '23a79b92275375315129b440206c85b9')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-mutt-misc/package.py b/var/spack/repos/builtin/packages/font-mutt-misc/package.py
new file mode 100644
index 0000000000..a5d4cae060
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-mutt-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontMuttMisc(Package):
+ """X.org mutt-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/mutt-misc"
+ url = "https://www.x.org/archive/individual/font/font-mutt-misc-1.0.3.tar.gz"
+
+ version('1.0.3', '6c2de53ba514f720e02af48eef28ff32')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-schumacher-misc/package.py b/var/spack/repos/builtin/packages/font-schumacher-misc/package.py
new file mode 100644
index 0000000000..193fa2691e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-schumacher-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontSchumacherMisc(Package):
+ """X.org schumacher-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/schumacher-misc"
+ url = "https://www.x.org/archive/individual/font/font-schumacher-misc-1.1.2.tar.gz"
+
+ version('1.1.2', '1f3386a0a690ba8117fc05b501f9f91b')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-screen-cyrillic/package.py b/var/spack/repos/builtin/packages/font-screen-cyrillic/package.py
new file mode 100644
index 0000000000..5914a3c9de
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-screen-cyrillic/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontScreenCyrillic(Package):
+ """X.org screen-cyrillic font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/screen-cyrillic"
+ url = "https://www.x.org/archive/individual/font/font-screen-cyrillic-1.0.4.tar.gz"
+
+ version('1.0.4', '4cadaf2ba4c4d0f4cb9b4e7b8f0a3019')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-sony-misc/package.py b/var/spack/repos/builtin/packages/font-sony-misc/package.py
new file mode 100644
index 0000000000..145ee20971
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-sony-misc/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontSonyMisc(Package):
+ """X.org sony-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/sony-misc"
+ url = "https://www.x.org/archive/individual/font/font-sony-misc-1.0.3.tar.gz"
+
+ version('1.0.3', '4026cb88e2253efc0b8376003780ccb6')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-sun-misc/package.py b/var/spack/repos/builtin/packages/font-sun-misc/package.py
new file mode 100644
index 0000000000..dcf5b9e217
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-sun-misc/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontSunMisc(Package):
+ """X.org sun-misc font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/sun-misc"
+ url = "https://www.x.org/archive/individual/font/font-sun-misc-1.0.3.tar.gz"
+
+ version('1.0.3', '87ce97ce0582e76bc4064a4d4d10db09')
+
+ depends_on('font-util')
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-util/package.py b/var/spack/repos/builtin/packages/font-util/package.py
new file mode 100644
index 0000000000..0b310117c6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-util/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontUtil(Package):
+ """X.Org font package creation/installation utilities."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/util"
+ url = "https://www.x.org/archive/individual/font/font-util-1.3.1.tar.gz"
+
+ version('1.3.1', 'd153a9af216e4498fa171faea2c82514')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/font-winitzki-cyrillic/package.py b/var/spack/repos/builtin/packages/font-winitzki-cyrillic/package.py
new file mode 100644
index 0000000000..0af366c742
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-winitzki-cyrillic/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontWinitzkiCyrillic(Package):
+ """X.org winitzki-cyrillic font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/winitzki-cyrillic"
+ url = "https://www.x.org/archive/individual/font/font-winitzki-cyrillic-1.0.3.tar.gz"
+
+ version('1.0.3', '777c667b080b33793528d5abf3247a48')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/font-xfree86-type1/package.py b/var/spack/repos/builtin/packages/font-xfree86-type1/package.py
new file mode 100644
index 0000000000..dceac106a9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/font-xfree86-type1/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class FontXfree86Type1(Package):
+ """X.org xfree86-type1 font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/font/xfree86-type1"
+ url = "https://www.x.org/archive/individual/font/font-xfree86-type1-1.0.4.tar.gz"
+
+ version('1.0.4', '89c33c5176cd580de6636ad50ce7777b')
+
+ depends_on('font-util')
+
+ depends_on('fontconfig', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('mkfontscale', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
+
+ # `make install` copies the files to the font-util installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/fontcacheproto/package.py b/var/spack/repos/builtin/packages/fontcacheproto/package.py
new file mode 100644
index 0000000000..77feb87573
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fontcacheproto/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fontcacheproto(Package):
+ """X.org FontcacheProto protocol headers."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/fontcacheproto"
+ url = "https://www.x.org/archive/individual/proto/fontcacheproto-0.1.3.tar.gz"
+
+ version('0.1.3', '5a91ab914ffbfbc856e6fcde52e6f3e3')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/fontconfig/package.py b/var/spack/repos/builtin/packages/fontconfig/package.py
index 76e9d8cb3f..4f79bbb6fc 100644
--- a/var/spack/repos/builtin/packages/fontconfig/package.py
+++ b/var/spack/repos/builtin/packages/fontconfig/package.py
@@ -24,18 +24,23 @@
##############################################################################
from spack import *
-class Fontconfig(Package):
- """Fontconfig customizing font access"""
+
+class Fontconfig(AutotoolsPackage):
+ """Fontconfig is a library for configuring/customizing font access"""
homepage = "http://www.freedesktop.org/wiki/Software/fontconfig/"
url = "http://www.freedesktop.org/software/fontconfig/release/fontconfig-2.11.1.tar.gz"
- version('2.11.1' , 'e75e303b4f7756c2b16203a57ac87eba')
+ version('2.11.1', 'e75e303b4f7756c2b16203a57ac87eba')
depends_on('freetype')
depends_on('libxml2')
+ depends_on('pkg-config', type='build')
+ depends_on('font-util', type='build')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix, "--enable-libxml2")
+ def configure_args(self):
+ font_path = join_path(self.spec['font-util'].prefix, 'share', 'fonts')
- make()
- make("install")
+ return ["--prefix={0}".format(self.prefix),
+ "--enable-libxml2",
+ "--disable-docs",
+ "--with-default-fonts={0}".format(font_path)]
diff --git a/var/spack/repos/builtin/packages/fontsproto/package.py b/var/spack/repos/builtin/packages/fontsproto/package.py
new file mode 100644
index 0000000000..c3771e18fa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fontsproto/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fontsproto(Package):
+ """X Fonts Extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/fontsproto"
+ url = "https://www.x.org/archive/individual/proto/fontsproto-2.1.3.tar.gz"
+
+ version('2.1.3', '0415f0360e33f3202af67c6c46782251')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/fonttosfnt/package.py b/var/spack/repos/builtin/packages/fonttosfnt/package.py
new file mode 100644
index 0000000000..016cab1ac8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fonttosfnt/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fonttosfnt(Package):
+ """Wrap a bitmap font in a sfnt (TrueType) wrapper."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/fonttosfnt"
+ url = "https://www.x.org/archive/individual/app/fonttosfnt-1.0.4.tar.gz"
+
+ version('1.0.4', 'ba77fd047a9cca400f17db8c46b06ce8')
+
+ depends_on('freetype')
+ depends_on('libfontenc')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/freetype/package.py b/var/spack/repos/builtin/packages/freetype/package.py
index 897c011396..4446ba5666 100644
--- a/var/spack/repos/builtin/packages/freetype/package.py
+++ b/var/spack/repos/builtin/packages/freetype/package.py
@@ -24,17 +24,22 @@
##############################################################################
from spack import *
-class Freetype(Package):
- """Font package"""
- homepage = "http://http://www.freetype.org"
- url = "http://download.savannah.gnu.org/releases/freetype/freetype-2.5.3.tar.gz"
- version('2.5.3' , 'cafe9f210e45360279c730d27bf071e9')
+class Freetype(AutotoolsPackage):
+ """FreeType is a freely available software library to render fonts.
+ It is written in C, designed to be small, efficient, highly customizable,
+ and portable while capable of producing high-quality output (glyph images)
+ of most vector and bitmap font formats."""
- depends_on('libpng')
+ homepage = "https://www.freetype.org/index.html"
+ url = "http://download.savannah.gnu.org/releases/freetype/freetype-2.7.tar.gz"
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ version('2.7', '337139e5c7c5bd645fe130608e0fa8b5')
+ version('2.5.3', 'cafe9f210e45360279c730d27bf071e9')
- make()
- make("install")
+ depends_on('libpng')
+ depends_on('bzip2')
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def configure_args(self):
+ return ['--with-harfbuzz=no']
diff --git a/var/spack/repos/builtin/packages/fslsfonts/package.py b/var/spack/repos/builtin/packages/fslsfonts/package.py
new file mode 100644
index 0000000000..6e46bd6b5a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fslsfonts/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fslsfonts(Package):
+ """fslsfonts produces a list of fonts served by an X font server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/fslsfonts"
+ url = "https://www.x.org/archive/individual/app/fslsfonts-1.0.5.tar.gz"
+
+ version('1.0.5', 'ef781bd6a7b529d3ed7a256055715730')
+
+ depends_on('libfs')
+
+ depends_on('xproto@7.0.25:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/fstobdf/package.py b/var/spack/repos/builtin/packages/fstobdf/package.py
new file mode 100644
index 0000000000..6358e33d2a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/fstobdf/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Fstobdf(Package):
+ """The fstobdf program reads a font from a font server and prints a BDF
+ file on the standard output that may be used to recreate the font.
+ This is useful in testing servers, debugging font metrics, and
+ reproducing lost BDF files."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/fstobdf"
+ url = "https://www.x.org/archive/individual/app/fstobdf-1.0.6.tar.gz"
+
+ version('1.0.6', '6d3f24673fcb9ce266f49dc140bbf250')
+
+ depends_on('libx11')
+ depends_on('libfs')
+
+ depends_on('xproto@7.0.25:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/gasnet/package.py b/var/spack/repos/builtin/packages/gasnet/package.py
index b3bd6c25f2..12ecd9fd6f 100644
--- a/var/spack/repos/builtin/packages/gasnet/package.py
+++ b/var/spack/repos/builtin/packages/gasnet/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Gasnet(Package):
"""GASNet is a language-independent, low-level networking layer
that provides network-independent, high-performance communication
@@ -36,24 +37,24 @@ class Gasnet(Package):
version('1.24.0', 'c8afdf48381e8b5a7340bdb32ca0f41a')
-
def install(self, spec, prefix):
# TODO: don't use paths with @ in them.
change_sed_delimiter('@', ';', 'configure')
- configure("--prefix=%s" % prefix,
- # TODO: factor IB suport out into architecture description.
- "--enable-ibv",
- "--enable-udp",
- "--disable-mpi",
- "--enable-par",
- "--enable-mpi-compat",
- "--enable-segment-fast",
- "--disable-aligned-segments",
- # TODO: make an option so that Legion can request builds with/without this.
- # See the Legion webpage for details on when to/not to use.
- "--disable-pshm",
- "--with-segment-mmap-max=64MB")
+ configure(
+ "--prefix=%s" % prefix,
+ # TODO: factor IB support out into architecture description.
+ "--enable-ibv",
+ "--enable-udp",
+ "--disable-mpi",
+ "--enable-par",
+ "--enable-mpi-compat",
+ "--enable-segment-fast",
+ "--disable-aligned-segments",
+ # TODO: make option so Legion can request builds with/without this.
+ # See the Legion webpage for details on when to/not to use.
+ "--disable-pshm",
+ "--with-segment-mmap-max=64MB")
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/gawk/package.py b/var/spack/repos/builtin/packages/gawk/package.py
new file mode 100644
index 0000000000..fbd0ae53bb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gawk/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gawk(AutotoolsPackage):
+ """If you are like many computer users, you would frequently like to make
+ changes in various text files wherever certain patterns appear, or
+ extract data from parts of certain lines while discarding the
+ rest. To write a program to do this in a language such as C or
+ Pascal is a time-consuming inconvenience that may take many lines
+ of code. The job is easy with awk, especially the GNU
+ implementation: gawk.
+
+ The awk utility interprets a special-purpose programming language
+ that makes it possible to handle simple data-reformatting jobs
+ with just a few lines of code.
+ """
+
+ homepage = "https://www.gnu.org/software/gawk/"
+ url = "http://ftp.gnu.org/gnu/gawk/gawk-4.1.4.tar.xz"
+
+ version('4.1.4', '4e7dbc81163e60fd4f0b52496e7542c9')
diff --git a/var/spack/repos/builtin/packages/gbenchmark/package.py b/var/spack/repos/builtin/packages/gbenchmark/package.py
new file mode 100644
index 0000000000..1c54f60243
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gbenchmark/package.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gbenchmark(CMakePackage):
+ """A microbenchmark support library"""
+
+ homepage = "https://github.com/google/benchmark"
+ url = "https://github.com/google/benchmark/archive/v1.0.0.tar.gz"
+
+ version('1.1.0', '8c539bbe2a212618fa87b6c38fba087100b6e4ae')
+ version('1.0.0', '4f778985dce02d2e63262e6f388a24b595254a93')
+
+ def build_type(self):
+ return "Release"
+
+ def patch(self):
+ filter_file(
+ r'add_cxx_compiler_flag..fstrict.aliasing.',
+ r'##### add_cxx_compiler_flag(-fstrict-aliasing)',
+ 'CMakeLists.txt'
+ )
+ filter_file(
+ r'add_cxx_compiler_flag..Werror',
+ r'##### add_cxx_compiler_flag(-Werror',
+ 'CMakeLists.txt'
+ )
+
+ def cmake_args(self, spec, prefix):
+ if self.compiler.name == 'intel':
+ return [
+ "-DCMAKE_CXX_FLAGS=-no-ansi-alias -fno-strict-aliasing",
+ "-DCMAKE_C_FLAGS=-no-ansi-alias -fno-strict-aliasing",
+ "-DBENCHMARK_ENABLE_TESTING=OFF"
+ ]
+ return []
diff --git a/var/spack/repos/builtin/packages/gcc/gcc-backport.patch b/var/spack/repos/builtin/packages/gcc/gcc-backport.patch
new file mode 100644
index 0000000000..f9fab68f23
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gcc/gcc-backport.patch
@@ -0,0 +1,138 @@
+2016-02-20 Bernd Edlinger <bernd.edlinger@hotmail.de>
+
+ Backported from mainline
+ 2016-02-19 Jakub Jelinek <jakub@redhat.com>
+ Bernd Edlinger <bernd.edlinger@hotmail.de>
+
+ * Make-lang.in: Invoke gperf with -L C++.
+ * cfns.gperf: Remove prototypes for hash and libc_name_p
+ inlines.
+ * cfns.h: Regenerated.
+ * except.c (nothrow_libfn_p): Adjust.
+
+Index: gcc/cp/Make-lang.in
+===================================================================
+--- a/gcc/cp/Make-lang.in (revision 233574)
++++ b/gcc/cp/Make-lang.in (working copy)
+@@ -111,7 +111,7 @@ else
+ # deleting the $(srcdir)/cp/cfns.h file.
+ $(srcdir)/cp/cfns.h:
+ endif
+- gperf -o -C -E -k '1-6,$$' -j1 -D -N 'libc_name_p' -L ANSI-C \
++ gperf -o -C -E -k '1-6,$$' -j1 -D -N 'libc_name_p' -L C++ \
+ $(srcdir)/cp/cfns.gperf --output-file $(srcdir)/cp/cfns.h
+
+ #
+Index: gcc/cp/cfns.gperf
+===================================================================
+--- a/gcc/cp/cfns.gperf (revision 233574)
++++ b/gcc/cp/cfns.gperf (working copy)
+@@ -1,3 +1,5 @@
++%language=C++
++%define class-name libc_name
+ %{
+ /* Copyright (C) 2000-2015 Free Software Foundation, Inc.
+
+@@ -16,14 +18,6 @@ for more details.
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+-#ifdef __GNUC__
+-__inline
+-#endif
+-static unsigned int hash (const char *, unsigned int);
+-#ifdef __GNUC__
+-__inline
+-#endif
+-const char * libc_name_p (const char *, unsigned int);
+ %}
+ %%
+ # The standard C library functions, for feeding to gperf; the result is used
+Index: gcc/cp/cfns.h
+===================================================================
+--- a/gcc/cp/cfns.h (revision 233574)
++++ b/gcc/cp/cfns.h (working copy)
+@@ -1,5 +1,5 @@
+-/* ANSI-C code produced by gperf version 3.0.3 */
+-/* Command-line: gperf -o -C -E -k '1-6,$' -j1 -D -N libc_name_p -L ANSI-C cfns.gperf */
++/* C++ code produced by gperf version 3.0.4 */
++/* Command-line: gperf -o -C -E -k '1-6,$' -j1 -D -N libc_name_p -L C++ --output-file cfns.h cfns.gperf */
+
+ #if !((' ' == 32) && ('!' == 33) && ('"' == 34) && ('#' == 35) \
+ && ('%' == 37) && ('&' == 38) && ('\'' == 39) && ('(' == 40) \
+@@ -28,7 +28,7 @@
+ #error "gperf generated tables don't work with this execution character set. Please report a bug to <bug-gnu-gperf@gnu.org>."
+ #endif
+
+-#line 1 "cfns.gperf"
++#line 3 "cfns.gperf"
+
+ /* Copyright (C) 2000-2015 Free Software Foundation, Inc.
+
+@@ -47,26 +47,19 @@ for more details.
+ You should have received a copy of the GNU General Public License
+ along with GCC; see the file COPYING3. If not see
+ <http://www.gnu.org/licenses/>. */
+-#ifdef __GNUC__
+-__inline
+-#endif
+-static unsigned int hash (const char *, unsigned int);
+-#ifdef __GNUC__
+-__inline
+-#endif
+-const char * libc_name_p (const char *, unsigned int);
+ /* maximum key range = 391, duplicates = 0 */
+
+-#ifdef __GNUC__
+-__inline
+-#else
+-#ifdef __cplusplus
+-inline
+-#endif
+-#endif
+-static unsigned int
+-hash (register const char *str, register unsigned int len)
++class libc_name
+ {
++private:
++ static inline unsigned int hash (const char *str, unsigned int len);
++public:
++ static const char *libc_name_p (const char *str, unsigned int len);
++};
++
++inline unsigned int
++libc_name::hash (register const char *str, register unsigned int len)
++{
+ static const unsigned short asso_values[] =
+ {
+ 400, 400, 400, 400, 400, 400, 400, 400, 400, 400,
+@@ -122,14 +115,8 @@ along with GCC; see the file COPYING3. If not see
+ return hval + asso_values[(unsigned char)str[len - 1]];
+ }
+
+-#ifdef __GNUC__
+-__inline
+-#ifdef __GNUC_STDC_INLINE__
+-__attribute__ ((__gnu_inline__))
+-#endif
+-#endif
+ const char *
+-libc_name_p (register const char *str, register unsigned int len)
++libc_name::libc_name_p (register const char *str, register unsigned int len)
+ {
+ enum
+ {
+Index: gcc/cp/except.c
+===================================================================
+--- a/gcc/cp/except.c (revision 233574)
++++ b/gcc/cp/except.c (working copy)
+@@ -1040,7 +1040,8 @@ nothrow_libfn_p (const_tree fn)
+ unless the system headers are playing rename tricks, and if
+ they are, we don't want to be confused by them. */
+ id = DECL_NAME (fn);
+- return !!libc_name_p (IDENTIFIER_POINTER (id), IDENTIFIER_LENGTH (id));
++ return !!libc_name::libc_name_p (IDENTIFIER_POINTER (id),
++ IDENTIFIER_LENGTH (id));
+ }
+
+ /* Returns nonzero if an exception of type FROM will be caught by a
diff --git a/var/spack/repos/builtin/packages/gcc/package.py b/var/spack/repos/builtin/packages/gcc/package.py
index 224105ea0f..31da068d72 100644
--- a/var/spack/repos/builtin/packages/gcc/package.py
+++ b/var/spack/repos/builtin/packages/gcc/package.py
@@ -1,8 +1,33 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
from spack import *
from contextlib import closing
from glob import glob
import sys
+from os.path import isfile
class Gcc(Package):
@@ -10,13 +35,18 @@ class Gcc(Package):
Objective-C, Fortran, and Java."""
homepage = "https://gcc.gnu.org"
- url = "http://open-source-box.org/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2"
- list_url = 'http://open-source-box.org/gcc/'
+ url = "http://ftp.gnu.org/gnu/gcc/gcc-4.9.2/gcc-4.9.2.tar.bz2"
+ list_url = 'http://ftp.gnu.org/gnu/gcc/'
list_depth = 2
+ version('6.3.0', '677a7623c7ef6ab99881bc4e048debb6')
+ version('6.2.0', '9768625159663b300ae4de2f4745fcc4')
version('6.1.0', '8fb6cb98b8459f5863328380fbf06bd1')
+ version('5.4.0', '4c626ac2a83ef30dfb9260e6f59c2b30')
version('5.3.0', 'c9616fd448f980259c31de613e575719')
version('5.2.0', 'a51bcfeb3da7dd4c623e27207ed43467')
+ version('5.1.0', 'd5525b1127d07d215960e6051c5da35e')
+ version('4.9.4', '87c24a4090c1577ba817ec6882602491')
version('4.9.3', '6f831b4d251872736e8e9cc09746f327')
version('4.9.2', '4df8ee253b7f3863ad0b86359cd39c43')
version('4.9.1', 'fddf71348546af523353bd43d34919c1')
@@ -32,6 +62,9 @@ class Gcc(Package):
variant('gold',
default=sys.platform != 'darwin',
description="Build the gold linker plugin for ld-based LTO")
+ variant('piclibs',
+ default=False,
+ description="Build PIC versions of libgfortran.a and libstdc++.a")
depends_on("mpfr")
depends_on("gmp")
@@ -49,6 +82,9 @@ class Gcc(Package):
else:
provides('golang', when='@4.7.1:')
+ patch('piclibs.patch', when='+piclibs')
+ patch('gcc-backport.patch', when='@4.7:4.9.2,5:5.3')
+
def install(self, spec, prefix):
# libjava/configure needs a minor fix to install into spack paths.
filter_file(r"'@.*@'", "'@[[:alnum:]]*@'", 'libjava/configure',
@@ -56,9 +92,23 @@ class Gcc(Package):
enabled_languages = set(('c', 'c++', 'fortran', 'java', 'objc'))
- if spec.satisfies("@4.7.1:") and sys.platform != 'darwin':
+ if spec.satisfies("@4.7.1:") and sys.platform != 'darwin' and \
+ not (spec.satisfies('@:4.9.3') and 'ppc64le' in spec.architecture):
enabled_languages.add('go')
+ # Fix a standard header file on OS X Yosemite that is incompatible
+ # with GCC by replacing its non-GCC-compliant macros
+ if 'yosemite' in spec.architecture:
+ if isfile(r'/usr/include/dispatch/object.h'):
+ new_dispatch_dir = join_path(prefix, 'include', 'dispatch')
+ mkdirp(new_dispatch_dir)
+ cp = which('cp')
+ new_header = join_path(new_dispatch_dir, 'object.h')
+ cp(r'/usr/include/dispatch/object.h', new_header)
+ filter_file(r'typedef void \(\^dispatch_block_t\)\(void\)',
+ 'typedef void* dispatch_block_t',
+ new_header)
+
# Generic options to compile GCC
options = ["--prefix=%s" % prefix, "--libdir=%s/lib64" % prefix,
"--disable-multilib",
diff --git a/var/spack/repos/builtin/packages/gcc/piclibs.patch b/var/spack/repos/builtin/packages/gcc/piclibs.patch
new file mode 100644
index 0000000000..0ecb793067
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gcc/piclibs.patch
@@ -0,0 +1,62 @@
+diff --git a/libgfortran/Makefile.in b/libgfortran/Makefile.in
+index 62b9f7a..7666fdb 100644
+--- a/libgfortran/Makefile.in
++++ b/libgfortran/Makefile.in
+@@ -357,11 +357,11 @@ AUTOMAKE = @AUTOMAKE@
+ AWK = @AWK@
+ CC = @CC@
+ CCDEPMODE = @CCDEPMODE@
+-CFLAGS = @CFLAGS@
++CFLAGS = @CFLAGS@ -fPIC
+ CPP = @CPP@
+-CPPFLAGS = @CPPFLAGS@
++CPPFLAGS = @CPPFLAGS@ -fPIC
+ CYGPATH_W = @CYGPATH_W@
+-DEFS = @DEFS@
++DEFS = @DEFS@ -fPIC
+ DEPDIR = @DEPDIR@
+ DSYMUTIL = @DSYMUTIL@
+ DUMPBIN = @DUMPBIN@
+@@ -371,7 +371,7 @@ ECHO_T = @ECHO_T@
+ EGREP = @EGREP@
+ EXEEXT = @EXEEXT@
+ FC = @FC@
+-FCFLAGS = @FCFLAGS@
++FCFLAGS = @FCFLAGS@ -fPIC
+ FGREP = @FGREP@
+ FPU_HOST_HEADER = @FPU_HOST_HEADER@
+ GREP = @GREP@
+diff --git a/libstdc++-v3/Makefile.in b/libstdc++-v3/Makefile.in
+index bede542..9b3e442 100644
+--- a/libstdc++-v3/Makefile.in
++++ b/libstdc++-v3/Makefile.in
+@@ -115,7 +115,7 @@ CC = @CC@
+ CCODECVT_CC = @CCODECVT_CC@
+ CCOLLATE_CC = @CCOLLATE_CC@
+ CCTYPE_CC = @CCTYPE_CC@
+-CFLAGS = @CFLAGS@
++CFLAGS = @CFLAGS@ -fPIC
+ CLOCALE_CC = @CLOCALE_CC@
+ CLOCALE_H = @CLOCALE_H@
+ CLOCALE_INTERNAL_H = @CLOCALE_INTERNAL_H@
+@@ -124,7 +124,7 @@ CMESSAGES_H = @CMESSAGES_H@
+ CMONEY_CC = @CMONEY_CC@
+ CNUMERIC_CC = @CNUMERIC_CC@
+ CPP = @CPP@
+-CPPFLAGS = @CPPFLAGS@
++CPPFLAGS = @CPPFLAGS@ -fPIC
+ CPU_DEFINES_SRCDIR = @CPU_DEFINES_SRCDIR@
+ CPU_OPT_BITS_RANDOM = @CPU_OPT_BITS_RANDOM@
+ CPU_OPT_EXT_RANDOM = @CPU_OPT_EXT_RANDOM@
+@@ -139,7 +139,7 @@ CYGPATH_W = @CYGPATH_W@
+ C_INCLUDE_DIR = @C_INCLUDE_DIR@
+ DBLATEX = @DBLATEX@
+ DEBUG_FLAGS = @DEBUG_FLAGS@
+-DEFS = @DEFS@
++DEFS = @DEFS@ -fPIC
+ DOT = @DOT@
+ DOXYGEN = @DOXYGEN@
+ DSYMUTIL = @DSYMUTIL@
+--
+2.8.3
+
diff --git a/var/spack/repos/builtin/packages/gccmakedep/package.py b/var/spack/repos/builtin/packages/gccmakedep/package.py
new file mode 100644
index 0000000000..ed2530d89e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gccmakedep/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gccmakedep(Package):
+ """X.org gccmakedep utilities."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/util/gccmakedep/"
+ url = "https://www.x.org/archive/individual/util/gccmakedep-1.0.3.tar.gz"
+
+ version('1.0.3', '127ddb6131eb4a56fdf6644a63ade788')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/gconf/package.py b/var/spack/repos/builtin/packages/gconf/package.py
new file mode 100644
index 0000000000..3b3abc71ed
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gconf/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gconf(Package):
+ """GConf is a system for storing application preferences."""
+
+ homepage = "https://projects.gnome.org/gconf/"
+ url = "ftp://ftp.gnome.org/pub/gnome/sources/GConf/3.2/GConf-3.2.6.tar.xz"
+
+ version('3.2.6', '2b16996d0e4b112856ee5c59130e822c')
+
+ depends_on('glib@2.14.0:')
+ depends_on('libxml2')
+
+ # TODO: add missing dependencies
+ # gio-2.0 >= 2.31.0
+ # gthread-2.0
+ # gmodule-2.0 >= 2.7.0
+ # gobject-2.0 >= 2.7.0
+ # dbus-1 >= 1.0.0
+ # dbus-glib-1 >= 0.74
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/gdal/package.py b/var/spack/repos/builtin/packages/gdal/package.py
index caff06bdd6..b52b1f1038 100644
--- a/var/spack/repos/builtin/packages/gdal/package.py
+++ b/var/spack/repos/builtin/packages/gdal/package.py
@@ -24,23 +24,24 @@
##############################################################################
from spack import *
+
class Gdal(Package):
- """
- GDAL is a translator library for raster and vector geospatial
+ """GDAL is a translator library for raster and vector geospatial
data formats that is released under an X/MIT style Open Source
license by the Open Source Geospatial Foundation. As a library,
it presents a single raster abstract data model and vector
abstract data model to the calling application for all supported
formats. It also comes with a variety of useful command line
- utilities for data translation and processing
+ utilities for data translation and processing.
"""
homepage = "http://www.gdal.org/"
- url = "http://download.osgeo.org/gdal/2.0.2/gdal-2.0.2.tar.gz"
+ url = "http://download.osgeo.org/gdal/2.1.2/gdal-2.1.2.tar.xz"
list_url = "http://download.osgeo.org/gdal/"
list_depth = 2
- version('2.0.2', '573865f3f59ba7b4f8f4cddf223b52a5')
+ version('2.1.2', 'ae85b78888514c75e813d658cac9478e')
+ version('2.0.2', '940208e737c87d31a90eaae43d0efd65')
extends('python')
@@ -62,7 +63,7 @@ class Gdal(Package):
depends_on("libpng")
depends_on("zlib")
depends_on("proj")
- depends_on("py-numpy")
+ depends_on("py-numpy", type=('build', 'run'))
parallel = False
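The gdal change above marks py-numpy as both a build- and run-time dependency. Below is a minimal hypothetical sketch of Spack dependency types; the package name, URL, and checksum are invented for illustration.

# Hypothetical example: name, URL, and checksum are illustrative only.
from spack import *


class DepTypeDemo(Package):
    """Hypothetical package showing dependency types."""

    homepage = "https://example.com/deptype-demo"
    url = "https://example.com/deptype-demo-1.0.tar.gz"

    version('1.0', 'fedcba9876543210fedcba9876543210')

    # Needed only while the package is being built.
    depends_on('cmake', type='build')
    # Needed at build time and again by the installed tools at run time.
    depends_on('py-numpy', type=('build', 'run'))
    # No type given: a regular build- and link-time dependency.
    depends_on('zlib')

    def install(self, spec, prefix):
        configure('--prefix={0}'.format(prefix))
        make()
        make('install')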
diff --git a/var/spack/repos/builtin/packages/gdb/package.py b/var/spack/repos/builtin/packages/gdb/package.py
index 58d8a545db..f90e4e7ff0 100644
--- a/var/spack/repos/builtin/packages/gdb/package.py
+++ b/var/spack/repos/builtin/packages/gdb/package.py
@@ -22,15 +22,15 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
class Gdb(Package):
+ """GDB, the GNU Project debugger, allows you to see what is going on
+ 'inside' another program while it executes -- or what another
+ program was doing at the moment it crashed.
"""
- GDB, the GNU Project debugger, allows you to see what is going on `inside' another program while it executes
- -- or what another program was doing at the moment it crashed.
- """
+
homepage = "https://www.gnu.org/software/gdb"
url = "http://ftp.gnu.org/gnu/gdb/gdb-7.10.tar.gz"
@@ -41,9 +41,18 @@ class Gdb(Package):
version('7.9', '8f8ced422fe462a00e0135a643544f17')
version('7.8.2', '8b0ea8b3559d3d90b3ff4952f0aeafbc')
- depends_on('texinfo')
+ variant('python', default=True, description='Compile with Python support')
+
+ # Required dependency
+ depends_on('texinfo', type='build')
+
+ # Optional dependency
+ depends_on('python', when='+python')
def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
+ options = ['--prefix=%s' % prefix]
+ if '+python' in spec:
+ options.extend(['--with-python'])
+ configure(*options)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
index daf43f3256..4d39086b06 100644
--- a/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
+++ b/var/spack/repos/builtin/packages/gdk-pixbuf/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class GdkPixbuf(Package):
"""The Gdk Pixbuf is a toolkit for image loading and pixel buffer
manipulation. It is used by GTK+ 2 and GTK+ 3 to load and
@@ -35,6 +36,8 @@ class GdkPixbuf(Package):
version('2.31.2', '6be6bbc4f356d4b79ab4226860ab8523')
+ depends_on("pkg-config", type="build")
+ depends_on("gettext")
depends_on("glib")
depends_on("jpeg")
depends_on("libpng")
diff --git a/var/spack/repos/builtin/packages/geant4/package.py b/var/spack/repos/builtin/packages/geant4/package.py
new file mode 100644
index 0000000000..239ef6d586
--- /dev/null
+++ b/var/spack/repos/builtin/packages/geant4/package.py
@@ -0,0 +1,85 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Geant4(Package):
+ """Geant4 is a toolkit for the simulation of the passage of particles
+ through matter. Its areas of application include high energy, nuclear
+ and accelerator physics, as well as studies in medical and space
+ science."""
+
+ homepage = "http://geant4.cern.ch/"
+ url = "http://geant4.cern.ch/support/source/geant4.10.01.p03.tar.gz"
+
+ version('10.02.p01', 'b81f7082a15f6a34b720b6f15c6289cfe4ddbbbdcef0dc52719f71fac95f7f1c')
+ version('10.01.p03', '4fb4175cc0dabcd517443fbdccd97439')
+
+ variant('qt', default=False, description='Enable Qt support')
+
+ depends_on('cmake@3.5:', type='build')
+
+ depends_on("clhep@2.3.1.1~cxx11+cxx14", when="@10.02.p01")
+ depends_on("clhep@2.2.0.4~cxx11+cxx14", when="@10.01.p03")
+ depends_on("expat")
+ depends_on("zlib")
+ depends_on("xerces-c")
+ depends_on("qt@4.8:", when="+qt")
+
+ def install(self, spec, prefix):
+ cmake_args = list(std_cmake_args)
+ cmake_args.append('-DXERCESC_ROOT_DIR:STRING=%s' %
+ spec['xerces-c'].prefix)
+ cmake_args.append('-DGEANT4_BUILD_CXXSTD=c++14')
+
+ cmake_args += ['-DGEANT4_USE_GDML=ON',
+ '-DGEANT4_USE_SYSTEM_EXPAT=ON',
+ '-DGEANT4_USE_SYSTEM_ZLIB=ON',
+ '-DGEANT4_USE_SYSTEM_CLHEP=ON']
+
+ # FIXME: data installation is turned off for now; each data set should
+ # probably become its own package so that different code versions can
+ # share the same data versions and save disk space.
+ cmake_args.append('-DGEANT4_INSTALL_DATA=OFF')
+
+ # http://geant4.web.cern.ch/geant4/UserDocumentation/UsersGuides/InstallationGuide/html/ch02s03.html
+ # fixme: likely things that need addressing:
+ # -DGEANT4_USE_OPENGL_X11=ON
+
+ if '+qt' in spec:
+ cmake_args.append('-DGEANT4_USE_QT=ON')
+
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+
+ with working_dir(build_directory, create=True):
+ cmake(source_directory, *cmake_args)
+ make()
+ make("install")
+
+ def url_for_version(self, version):
+ """Handle Geant4's unusual version string."""
+ return "http://geant4.cern.ch/support/source/geant4.%s.tar.gz" % version
diff --git a/var/spack/repos/builtin/packages/geos/package.py b/var/spack/repos/builtin/packages/geos/package.py
index 88438b0a99..324186cfbc 100644
--- a/var/spack/repos/builtin/packages/geos/package.py
+++ b/var/spack/repos/builtin/packages/geos/package.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+
class Geos(Package):
"""GEOS (Geometry Engine - Open Source) is a C++ port of the Java
diff --git a/var/spack/repos/builtin/packages/gettext/package.py b/var/spack/repos/builtin/packages/gettext/package.py
index df301aea3c..cf260c3f8a 100644
--- a/var/spack/repos/builtin/packages/gettext/package.py
+++ b/var/spack/repos/builtin/packages/gettext/package.py
@@ -24,31 +24,80 @@
##############################################################################
from spack import *
+
class Gettext(Package):
"""GNU internationalization (i18n) and localization (l10n) library."""
homepage = "https://www.gnu.org/software/gettext/"
url = "http://ftpmirror.gnu.org/gettext/gettext-0.19.7.tar.xz"
- version('0.19.7', 'f81e50556da41b44c1d59ac93474dca5')
+ version('0.19.8.1', 'df3f5690eaa30fd228537b00cb7b7590')
+ version('0.19.7', 'f81e50556da41b44c1d59ac93474dca5')
+
+ # Recommended variants
+ variant('curses', default=True, description='Use libncurses')
+ variant('libxml2', default=True, description='Use libxml2')
+ variant('git', default=True, description='Enable git support')
+ variant('tar', default=True, description='Enable tar support')
+ variant('bzip2', default=True, description='Enable bzip2 support')
+ variant('xz', default=True, description='Enable xz support')
+
+ # Optional variants
+ variant('libunistring', default=False, description='Use libunistring')
+
+ # Recommended dependencies
+ depends_on('ncurses', when='+curses')
+ depends_on('libxml2', when='+libxml2')
+ # Java runtime and compiler (e.g. GNU gcj or kaffe)
+ # C# runtime and compiler (e.g. pnet or mono)
+ depends_on('tar', when='+tar')
+ # depends_on('gzip', when='+gzip')
+ depends_on('bzip2', when='+bzip2')
+ depends_on('xz', when='+xz')
+
+ # Optional dependencies
+ # depends_on('glib') # circular dependency?
+ # depends_on('libcroco@0.6.1:')
+ depends_on('libunistring', when='+libunistring')
+ # depends_on('cvs')
def install(self, spec, prefix):
- options = ['--disable-dependency-tracking',
- '--disable-silent-rules',
- '--disable-debug',
- '--prefix=%s' % prefix,
- '--with-included-gettext',
- '--with-included-glib',
- '--with-included-libcroco',
- '--with-included-libunistring',
- '--with-emacs',
- '--with-lispdir=%s/emacs/site-lisp/gettext' % prefix.share,
- '--disable-java',
- '--disable-csharp',
- '--without-git', # Don't use VCS systems to create these archives
- '--without-cvs',
- '--without-xz']
-
- configure(*options)
+ config_args = [
+ '--prefix={0}'.format(prefix),
+ '--disable-java',
+ '--disable-csharp',
+ '--with-included-glib',
+ '--with-included-gettext',
+ '--with-included-libcroco',
+ '--without-emacs',
+ '--with-lispdir=%s/emacs/site-lisp/gettext' % prefix.share,
+ '--without-cvs'
+ ]
+
+ if '+curses' in spec:
+ config_args.append('--with-ncurses-prefix={0}'.format(
+ spec['ncurses'].prefix))
+ else:
+ config_args.append('--disable-curses')
+
+ if '+libxml2' in spec:
+ config_args.append('--with-libxml2-prefix={0}'.format(
+ spec['libxml2'].prefix))
+ else:
+ config_args.append('--with-included-libxml')
+
+ if '+bzip2' not in spec:
+ config_args.append('--without-bzip2')
+
+ if '+xz' not in spec:
+ config_args.append('--without-xz')
+
+ if '+libunistring' in spec:
+ config_args.append('--with-libunistring-prefix={0}'.format(
+ spec['libunistring'].prefix))
+ else:
+ config_args.append('--with-included-libunistring')
+
+ configure(*config_args)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/gflags/package.py b/var/spack/repos/builtin/packages/gflags/package.py
index 9f3552d53d..7e04c9b682 100644
--- a/var/spack/repos/builtin/packages/gflags/package.py
+++ b/var/spack/repos/builtin/packages/gflags/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Gflags(Package):
"""The gflags package contains a C++ library that implements
commandline flags processing. It includes built-in support for
@@ -37,6 +37,8 @@ class Gflags(Package):
version('2.1.2', 'ac432de923f9de1e9780b5254884599f')
+ depends_on('cmake', type='build')
+
def install(self, spec, prefix):
cmake("-DCMAKE_INSTALL_PREFIX=" + prefix,
"-DBUILD_SHARED_LIBS=ON")
diff --git a/var/spack/repos/builtin/packages/ghostscript-fonts/package.py b/var/spack/repos/builtin/packages/ghostscript-fonts/package.py
new file mode 100644
index 0000000000..1bf3f857b0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ghostscript-fonts/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import glob
+
+
+class GhostscriptFonts(Package):
+ """Ghostscript Fonts"""
+
+ homepage = "http://ghostscript.com/"
+ url = "ftp://ftp.imagemagick.org/pub/ImageMagick/delegates/ghostscript-fonts-std-8.11.tar.gz"
+
+ version('8.11', '6865682b095f8c4500c54b285ff05ef6')
+
+ def install(self, spec, prefix):
+ fdir = join_path(prefix.share, 'font')
+ mkdirp(fdir)
+ files = glob.glob('*')
+ for f in files:
+ if not f.startswith('spack-build'):
+ install(f, fdir)
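ghostscript-fonts has no build step; its install method simply copies the unpacked files into the prefix. Below is a minimal hypothetical sketch of that copy-only pattern; the package name, URL, checksum, and destination directory are invented.

# Hypothetical example: name, URL, and checksum are illustrative only.
from spack import *
import glob


class CopyOnlyDemo(Package):
    """Hypothetical package that only copies its payload into the prefix."""

    homepage = "https://example.com/copy-demo"
    url = "https://example.com/copy-demo-1.0.tar.gz"

    version('1.0', 'a1b2c3d4e5f60718293a4b5c6d7e8f90')

    def install(self, spec, prefix):
        destdir = join_path(prefix.share, 'copy-demo')
        mkdirp(destdir)
        # Copy every unpacked file, skipping Spack's scratch directory.
        for f in glob.glob('*'):
            if not f.startswith('spack-build'):
                install(f, destdir)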
diff --git a/var/spack/repos/builtin/packages/ghostscript/package.py b/var/spack/repos/builtin/packages/ghostscript/package.py
index 707f65c902..f63ebac0c1 100644
--- a/var/spack/repos/builtin/packages/ghostscript/package.py
+++ b/var/spack/repos/builtin/packages/ghostscript/package.py
@@ -26,16 +26,20 @@ from spack import *
class Ghostscript(Package):
- """an interpreter for the PostScript language and for PDF. """
+ """An interpreter for the PostScript language and for PDF."""
+
homepage = "http://ghostscript.com/"
- url = "http://downloads.ghostscript.com/public/old-gs-releases/ghostscript-9.16.tar.gz"
+ url = "http://downloads.ghostscript.com/public/old-gs-releases/ghostscript-9.18.tar.gz"
- version('9.16', '829319325bbdb83f5c81379a8f86f38f')
+ version('9.18', '33a47567d7a591c00a253caddd12a88a')
parallel = False
+ depends_on('libtiff')
+
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix, "--enable-shared")
+ configure('--prefix={0}'.format(prefix),
+ '--with-system-libtiff')
make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/giflib/package.py b/var/spack/repos/builtin/packages/giflib/package.py
new file mode 100644
index 0000000000..7082384b9b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/giflib/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Giflib(Package):
+ """The GIFLIB project maintains the giflib service library, which has
+ been pulling images out of GIFs since 1989."""
+
+ homepage = "http://giflib.sourceforge.net/"
+ url = "https://downloads.sourceforge.net/project/giflib/giflib-5.1.4.tar.bz2"
+
+ version('5.1.4', '2c171ced93c0e83bb09e6ccad8e3ba2b')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/git-lfs/package.py b/var/spack/repos/builtin/packages/git-lfs/package.py
new file mode 100644
index 0000000000..25e440ff6d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/git-lfs/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class GitLfs(Package):
+ """Git LFS is a system for managing and versioning large files in
+ association with a Git repository. Instead of storing the large files
+ within the Git repository as blobs, Git LFS stores special "pointer
+ files" in the repository, while storing the actual file contents on a
+ Git LFS server."""
+
+ homepage = "https://git-lfs.github.com"
+ git_url = "https://github.com/github/git-lfs.git"
+
+ version('1.4.1', git=git_url, tag='v1.4.1')
+ version('1.3.1', git=git_url, tag='v1.3.1')
+
+ # TODO: Implement this by following the instructions at this location:
+ # https://github.com/github/git-lfs/blob/master/CONTRIBUTING.md#building
+ # variant('test', default=True, description='Build and run tests as part of the build.') # NOQA: E501
+
+ depends_on('go@1.5:', type='build')
+ depends_on('git@1.8.2:', type='run')
+
+ def install(self, spec, prefix):
+ bootstrap_script = Executable(join_path('script', 'bootstrap'))
+ bootstrap_script()
+
+ mkdirp(prefix.bin)
+ install(join_path('bin', 'git-lfs'), prefix.bin)
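git-lfs builds by invoking its own bootstrap script rather than configure/make. Below is a minimal hypothetical sketch of wrapping an in-tree script with Executable and installing the resulting binary; the package name, repository URL, tag, and binary name are invented.

# Hypothetical example: name, repository URL, and tag are illustrative only.
from spack import *


class ScriptBuildDemo(Package):
    """Hypothetical package driven by an in-tree build script."""

    homepage = "https://example.com/script-demo"
    git_url = "https://example.com/script-demo.git"

    version('1.0', git=git_url, tag='v1.0')

    depends_on('go@1.5:', type='build')

    def install(self, spec, prefix):
        # Wrap the project's own build script as a callable.
        bootstrap = Executable(join_path('script', 'bootstrap'))
        bootstrap()

        mkdirp(prefix.bin)
        install(join_path('bin', 'script-demo'), prefix.bin)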
diff --git a/var/spack/repos/builtin/packages/git/package.py b/var/spack/repos/builtin/packages/git/package.py
index 03e7d3e031..c7b239ef12 100644
--- a/var/spack/repos/builtin/packages/git/package.py
+++ b/var/spack/repos/builtin/packages/git/package.py
@@ -22,8 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import sys
from spack import *
+
class Git(Package):
"""Git is a free and open source distributed version control
system designed to handle everything from small to very large
@@ -31,47 +33,56 @@ class Git(Package):
homepage = "http://git-scm.com"
url = "https://github.com/git/git/tarball/v2.7.1"
+ version('2.11.0', 'c63fb83b86431af96f8e9722ebb3ca01')
+ version('2.9.3', 'b0edfc0f3cb046aec7ed68a4b7282a75')
+ version('2.9.2', '3ff8a9b30fd5c99a02e6d6585ab543fc')
+ version('2.9.1', 'a5d806743a992300b45f734d1667ddd2')
+ version('2.9.0', 'bf33a13c2adc05bc9d654c415332bc65')
+ version('2.8.4', '86afb10254c3803894c9863fb5896bb6')
+ version('2.8.3', '0e19f31f96f9364fd247b8dc737dacfd')
+ version('2.8.2', '3d55550880af98f6e35c7f1d7c5aecfe')
version('2.8.1', '1308448d95afa41a4135903f22262fc8')
version('2.8.0', 'eca687e46e9750121638f258cff8317b')
version('2.7.3', 'fa1c008b56618c355a32ba4a678305f6')
version('2.7.1', 'bf0706b433a8dedd27a63a72f9a66060')
-
# See here for info on vulnerable Git versions:
# http://www.theregister.co.uk/2016/03/16/git_server_client_patch_now/
# All the following are vulnerable
- #version('2.6.3', 'b711be7628a4a2c25f38d859ee81b423')
- #version('2.6.2', 'da293290da69f45a86a311ad3cd43dc8')
- #version('2.6.1', '4c62ee9c5991fe93d99cf2a6b68397fd')
- #version('2.6.0', 'eb76a07148d94802a1745d759716a57e')
- #version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b')
- #version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c')
-
+ # version('2.6.3', 'b711be7628a4a2c25f38d859ee81b423')
+ # version('2.6.2', 'da293290da69f45a86a311ad3cd43dc8')
+ # version('2.6.1', '4c62ee9c5991fe93d99cf2a6b68397fd')
+ # version('2.6.0', 'eb76a07148d94802a1745d759716a57e')
+ # version('2.5.4', '3eca2390cf1fa698b48e2a233563a76b')
+ # version('2.2.1', 'ff41fdb094eed1ec430aed8ee9b9849c')
- depends_on("openssl")
- depends_on("autoconf")
+ depends_on("autoconf", type='build')
depends_on("curl")
depends_on("expat")
-
- # Also depends_on gettext: apt-get install gettext (Ubuntu)
-
- # Use system perl for now.
- # depends_on("perl")
- # depends_on("pcre")
-
+ depends_on("gettext")
+ depends_on("libiconv")
+ depends_on("openssl")
+ depends_on("pcre")
+ depends_on("perl")
depends_on("zlib")
def install(self, spec, prefix):
+ env['LDFLAGS'] = "-L%s" % spec['gettext'].prefix.lib + " -lintl"
configure_args = [
"--prefix=%s" % prefix,
- "--without-pcre",
- "--with-openssl=%s" % spec['openssl'].prefix,
- "--with-zlib=%s" % spec['zlib'].prefix,
"--with-curl=%s" % spec['curl'].prefix,
"--with-expat=%s" % spec['expat'].prefix,
- ]
+ "--with-iconv=%s" % spec['libiconv'].prefix,
+ "--with-libpcre=%s" % spec['pcre'].prefix,
+ "--with-openssl=%s" % spec['openssl'].prefix,
+ "--with-perl=%s" % join_path(spec['perl'].prefix.bin, 'perl'),
+ "--with-zlib=%s" % spec['zlib'].prefix,
+ ]
which('autoreconf')('-i')
configure(*configure_args)
+ if sys.platform == "darwin":
+ # Don't link with -lrt; the system has no (and needs no) librt
+ filter_file(r' -lrt$', '', 'Makefile')
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/gl2ps/package.py b/var/spack/repos/builtin/packages/gl2ps/package.py
index 25172bd544..d5e7b00027 100644
--- a/var/spack/repos/builtin/packages/gl2ps/package.py
+++ b/var/spack/repos/builtin/packages/gl2ps/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Gl2ps(Package):
"""GL2PS is a C library providing high quality vector output for any
OpenGL application."""
diff --git a/var/spack/repos/builtin/packages/glew/package.py b/var/spack/repos/builtin/packages/glew/package.py
new file mode 100644
index 0000000000..5df7c8642f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/glew/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Glew(Package):
+ """The OpenGL Extension Wrangler Library."""
+
+ homepage = "http://glew.sourceforge.net/"
+ url = "https://sourceforge.net/projects/glew/files/glew/2.0.0/glew-2.0.0.tgz/download"
+
+ version('2.0.0', '2a2cd7c98f13854d2fcddae0d2b20411')
+
+ depends_on("cmake", type='build')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+
+ with working_dir('build'):
+ cmake('./cmake/', *options)
+
+ # https://github.com/Homebrew/legacy-homebrew/issues/22025
+ # Note: This file is generated only after cmake is run
+ filter_file(r'Requires: glu',
+ (''), '../glew.pc')
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/glib/g_date_strftime.patch b/var/spack/repos/builtin/packages/glib/g_date_strftime.patch
new file mode 100644
index 0000000000..532c743277
--- /dev/null
+++ b/var/spack/repos/builtin/packages/glib/g_date_strftime.patch
@@ -0,0 +1,34 @@
+From 00148329967adb196138372771052a3f606a6ea3 Mon Sep 17 00:00:00 2001
+From: coypu <coypu@sdf.org>
+Date: Wed, 2 Mar 2016 19:43:10 +0200
+Subject: [PATCH 2/2] gdate: Suppress string format literal warning
+
+Newer versions of GCC emit an error here, but we know it's safe.
+https://bugzilla.gnome.org/761550
+---
+ glib/gdate.c | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/glib/gdate.c b/glib/gdate.c
+index 4aece02..92c34d2 100644
+--- a/glib/gdate.c
++++ b/glib/gdate.c
+@@ -2439,6 +2439,9 @@ win32_strftime_helper (const GDate *d,
+ *
+ * Returns: number of characters written to the buffer, or 0 the buffer was too small
+ */
++#pragma GCC diagnostic push
++#pragma GCC diagnostic ignored "-Wformat-nonliteral"
++
+ gsize
+ g_date_strftime (gchar *s,
+ gsize slen,
+@@ -2549,3 +2552,5 @@ g_date_strftime (gchar *s,
+ return retval;
+ #endif
+ }
++
++#pragma GCC diagnostic pop
+--
+2.7.1
+
diff --git a/var/spack/repos/builtin/packages/glib/no-Werror=format-security.patch b/var/spack/repos/builtin/packages/glib/no-Werror=format-security.patch
new file mode 100644
index 0000000000..cfcfe424be
--- /dev/null
+++ b/var/spack/repos/builtin/packages/glib/no-Werror=format-security.patch
@@ -0,0 +1,16 @@
+--- a/configure.ac 2016-08-16 11:57:34.000000000 -0400
++++ b/configure.ac 2016-08-16 11:57:36.000000000 -0400
+@@ -3357,11 +3357,11 @@
+ enable_compile_warnings=yes)
+ AS_IF([test "x$enable_compile_warnings" = xyes], [
+ CC_CHECK_FLAGS_APPEND([GLIB_WARN_CFLAGS], [CFLAGS], [\
+ -Wall -Wstrict-prototypes -Werror=declaration-after-statement \
+ -Werror=missing-prototypes -Werror=implicit-function-declaration \
+- -Werror=pointer-arith -Werror=init-self -Werror=format-security \
+- -Werror=format=2 -Werror=missing-include-dirs])
++ -Werror=pointer-arith -Werror=init-self \
++ -Werror=missing-include-dirs])
+ ])
+ AC_SUBST(GLIB_WARN_CFLAGS)
+
+ #
diff --git a/var/spack/repos/builtin/packages/glib/package.py b/var/spack/repos/builtin/packages/glib/package.py
index b146ea63d8..4d8085baf2 100644
--- a/var/spack/repos/builtin/packages/glib/package.py
+++ b/var/spack/repos/builtin/packages/glib/package.py
@@ -23,24 +23,53 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import sys
+import os
+
class Glib(Package):
"""The GLib package contains a low-level libraries useful for
providing data structure handling for C, portability wrappers
and interfaces for such runtime functionality as an event loop,
threads, dynamic loading and an object system."""
+
homepage = "https://developer.gnome.org/glib/"
url = "http://ftp.gnome.org/pub/gnome/sources/glib/2.42/glib-2.42.1.tar.xz"
+ version('2.49.4', 'e2c87c03017b0cd02c4c73274b92b148')
+ version('2.48.1', '67bd3b75c9f6d5587b457dc01cdcd5bb')
version('2.42.1', '89c4119e50e767d3532158605ee9121a')
- depends_on("libffi")
- depends_on("zlib")
- depends_on("pkg-config")
- depends_on('gettext', sys.platform=='darwin')
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('pkg-config+internal_glib', type='build')
+ depends_on('libffi')
+ depends_on('zlib')
+ depends_on('gettext')
+ depends_on('pcre+utf', when='@2.48:')
+
+ # The following patch is needed for gcc-6.1
+ patch('g_date_strftime.patch', when='@2.42.1')
+ # Clang does not honor the pragma lines that disable -Werror around a
+ # legitimate usage, so this patch removes those -Werror flags instead.
+ patch('no-Werror=format-security.patch')
+
+ def url_for_version(self, version):
+ """Handle glib's version-based custom URLs."""
+ url = 'http://ftp.gnome.org/pub/gnome/sources/glib'
+ return url + '/%s/glib-%s.tar.xz' % (version.up_to(2), version)
def install(self, spec, prefix):
+ autoreconf = which("autoreconf")
+ autoreconf("--install", "--verbose", "--force",
+ "-I", "config",
+ "-I", os.path.join(spec['pkg-config'].prefix,
+ "share", "aclocal"),
+ "-I", os.path.join(spec['automake'].prefix,
+ "share", "aclocal"),
+ "-I", os.path.join(spec['libtool'].prefix,
+ "share", "aclocal"),
+ )
configure("--prefix=%s" % prefix)
make()
make("install", parallel=False)
diff --git a/var/spack/repos/builtin/packages/glm/package.py b/var/spack/repos/builtin/packages/glm/package.py
index 0c9212f17d..c565b3cae7 100644
--- a/var/spack/repos/builtin/packages/glm/package.py
+++ b/var/spack/repos/builtin/packages/glm/package.py
@@ -26,17 +26,18 @@ from spack import *
class Glm(Package):
- """
- OpenGL Mathematics (GLM) is a header only C++ mathematics library for graphics software based on
- the OpenGL Shading Language (GLSL) specification.
+ """OpenGL Mathematics (GLM) is a header only C++ mathematics library for
+ graphics software based on the OpenGL Shading Language (GLSL)
+ specification.
+
"""
homepage = "https://github.com/g-truc/glm"
url = "https://github.com/g-truc/glm/archive/0.9.7.1.tar.gz"
version('0.9.7.1', '61af6639cdf652d1cdd7117190afced8')
-
- depends_on ("cmake")
+
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
diff --git a/var/spack/repos/builtin/packages/global/package.py b/var/spack/repos/builtin/packages/global/package.py
index c144d6660b..fedf41c829 100644
--- a/var/spack/repos/builtin/packages/global/package.py
+++ b/var/spack/repos/builtin/packages/global/package.py
@@ -34,7 +34,7 @@ class Global(Package):
version('6.5', 'dfec818b4f53d91721e247cf7b218078')
- depends_on('exuberant-ctags')
+ depends_on('exuberant-ctags', type=('build', 'run'))
depends_on('ncurses')
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/globus-toolkit/package.py b/var/spack/repos/builtin/packages/globus-toolkit/package.py
new file mode 100644
index 0000000000..5cec13a5af
--- /dev/null
+++ b/var/spack/repos/builtin/packages/globus-toolkit/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class GlobusToolkit(Package):
+ """The Globus Toolkit is an open source software toolkit used for building
+ grids"""
+
+ homepage = "http://toolkit.globus.org"
+ url = "http://toolkit.globus.org/ftppub/gt6/installers/src/globus_toolkit-6.0.1470089956.tar.gz"
+
+ version('6.0.1470089956', 'b77fe3cc5a5844df995688b0e630d077')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/glog/package.py b/var/spack/repos/builtin/packages/glog/package.py
index 03ee092429..14f042732b 100644
--- a/var/spack/repos/builtin/packages/glog/package.py
+++ b/var/spack/repos/builtin/packages/glog/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Glog(Package):
"""C++ implementation of the Google logging module."""
diff --git a/var/spack/repos/builtin/packages/glpk/package.py b/var/spack/repos/builtin/packages/glpk/package.py
index 2ab3c38150..1b52643e59 100644
--- a/var/spack/repos/builtin/packages/glpk/package.py
+++ b/var/spack/repos/builtin/packages/glpk/package.py
@@ -22,22 +22,23 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
class Glpk(Package):
- """
- The GLPK (GNU Linear Programming Kit) package is intended for solving large-scale linear programming (LP), mixed
- integer programming (MIP), and other related problems. It is a set of routines written in ANSI C and organized in
- the form of a callable library
+ """The GLPK (GNU Linear Programming Kit) package is intended for solving
+ large-scale linear programming (LP), mixed integer programming
+ (MIP), and other related problems. It is a set of routines written
+ in ANSI C and organized in the form of a callable library.
+
"""
homepage = "https://www.gnu.org/software/glpk"
url = "http://ftp.gnu.org/gnu/glpk/glpk-4.57.tar.gz"
version('4.57', '237531a54f73155842f8defe51aedb0f')
- variant('gmp', default=False, description='Activates support for GMP library')
+ variant('gmp', default=False,
+ description='Activates support for GMP library')
depends_on('gmp', when='+gmp')
diff --git a/var/spack/repos/builtin/packages/glproto/package.py b/var/spack/repos/builtin/packages/glproto/package.py
new file mode 100644
index 0000000000..462e529067
--- /dev/null
+++ b/var/spack/repos/builtin/packages/glproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Glproto(Package):
+ """OpenGL Extension to the X Window System.
+
+ This extension defines a protocol for the client to send 3D rendering
+ commands to the X server."""
+
+ homepage = "https://www.x.org/wiki/"
+ url = "https://www.x.org/archive/individual/proto/glproto-1.4.17.tar.gz"
+
+ version('1.4.17', 'd69554c1b51a83f2c6976a640819911b')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/gmake/package.py b/var/spack/repos/builtin/packages/gmake/package.py
new file mode 100644
index 0000000000..ca296350a4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gmake/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gmake(Package):
+ """GNU Make."""
+
+ homepage = "http://gnu.org/gnu/make"
+ url = "ftp://ftp.gnu.org/gnu/make/make-4.0.tar.gz"
+
+ version('4.0', 'b5e558f981326d9ca1bfdb841640721a')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+ with working_dir(prefix.bin):
+ symlink('make', 'gmake')
diff --git a/var/spack/repos/builtin/packages/gmp/package.py b/var/spack/repos/builtin/packages/gmp/package.py
index 3933788425..45e8b8a452 100644
--- a/var/spack/repos/builtin/packages/gmp/package.py
+++ b/var/spack/repos/builtin/packages/gmp/package.py
@@ -24,20 +24,26 @@
##############################################################################
from spack import *
-class Gmp(Package):
- """GMP is a free library for arbitrary precision arithmetic,
- operating on signed integers, rational numbers, and
- floating-point numbers."""
+
+class Gmp(AutotoolsPackage):
+ """GMP is a free library for arbitrary precision arithmetic, operating
+ on signed integers, rational numbers, and floating-point numbers."""
+
homepage = "https://gmplib.org"
- url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"
+ url = "https://gmplib.org/download/gmp/gmp-6.0.0a.tar.bz2"
- version('6.1.0' , '86ee6e54ebfc4a90b643a65e402c4048')
+ version('6.1.2', '8ddbb26dc3bd4e2302984debba1406a5')
+ version('6.1.1', '4c175f86e11eb32d8bf9872ca3a8e11d')
+ version('6.1.0', '86ee6e54ebfc4a90b643a65e402c4048')
version('6.0.0a', 'b7ff2d88cae7f8085bd5006096eed470')
- version('6.0.0' , '6ef5869ae735db9995619135bd856b84')
+ version('6.0.0', '6ef5869ae735db9995619135bd856b84')
+
+ depends_on('m4', type='build')
- depends_on("m4")
+ def configure_args(self):
+ args = ['--enable-cxx']
+ # This flag is necessary for the Intel build to pass `make check`
+ if self.spec.compiler.name == 'intel':
+ args.append('CXXFLAGS=-no-ftz')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
+ return args
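The gmp recipe above is converted to AutotoolsPackage, so it only supplies configure_args and inherits the configure/build/install stages. Below is a minimal hypothetical sketch of that base-class pattern, including a compiler-conditional flag like the one added above; the class name, URL, and checksum are invented.

# Hypothetical example: name, URL, and checksum are illustrative only.
from spack import *


class AutotoolsDemo(AutotoolsPackage):
    """Hypothetical package using the AutotoolsPackage base class."""

    homepage = "https://example.com/at-demo"
    url = "https://example.com/at-demo-1.0.tar.gz"

    version('1.0', 'ffeeddccbbaa00998877665544332211')

    depends_on('m4', type='build')

    def configure_args(self):
        # The base class runs configure, make, and make install; the
        # package only provides the extra configure arguments.
        args = ['--enable-cxx']
        if self.spec.compiler.name == 'intel':
            # Extra flag only when building with the Intel compilers.
            args.append('CXXFLAGS=-no-ftz')
        return args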
diff --git a/var/spack/repos/builtin/packages/gmsh/package.py b/var/spack/repos/builtin/packages/gmsh/package.py
index e425d460c2..fffd448443 100644
--- a/var/spack/repos/builtin/packages/gmsh/package.py
+++ b/var/spack/repos/builtin/packages/gmsh/package.py
@@ -25,54 +25,106 @@
from spack import *
-class Gmsh(Package):
- """
- Gmsh is a free 3D finite element grid generator with a built-in CAD engine and post-processor. Its design goal is
- to provide a fast, light and user-friendly meshing tool with parametric input and advanced visualization
- capabilities. Gmsh is built around four modules: geometry, mesh, solver and post-processing. The specification of
- any input to these modules is done either interactively using the graphical user interface or in ASCII text files
- using Gmsh's own scripting language.
+class Gmsh(CMakePackage):
+ """Gmsh is a free 3D finite element grid generator with a built-in CAD engine
+ and post-processor. Its design goal is to provide a fast, light and
+ user-friendly meshing tool with parametric input and advanced visualization
+ capabilities. Gmsh is built around four modules: geometry, mesh, solver and
+ post-processing. The specification of any input to these modules is done
+ either interactively using the graphical user interface or in ASCII text
+ files using Gmsh's own scripting language.
"""
+
homepage = 'http://gmsh.info'
url = 'http://gmsh.info/src/gmsh-2.11.0-source.tgz'
+ version('2.15.0', '992a4b580454105f719f5bc05441d3d392ab0b4b80d4ea07b61ca3bdc974070a')
+ version('2.12.0', '7fbd2ec8071e79725266e72744d21e902d4fe6fa9e7c52340ad5f4be5c159d09')
version('2.11.0', 'f15b6e7ac9ca649c9a74440e1259d0db')
- # FIXME : Misses dependencies on gmm, PetsC, TetGen
-
- variant('shared', default=True, description='Enables the build of shared libraries')
- variant('debug', default=False, description='Builds the library in debug mode')
- variant('mpi', default=False, description='Builds MPI support for parser and solver')
- variant('fltk', default=False, description='Enables the build of the FLTK GUI')
- variant('hdf5', default=False, description='Enables HDF5 support')
- variant('compression', default=True, description='Enables IO compression through zlib')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+ variant('debug', default=False,
+ description='Builds the library in debug mode')
+ variant('mpi', default=True,
+ description='Builds MPI support for parser and solver')
+ variant('fltk', default=False,
+ description='Enables the build of the FLTK GUI')
+ variant('hdf5', default=False, description='Enables HDF5 support')
+ variant('compression', default=True,
+ description='Enables IO compression through zlib')
+ variant('oce', default=False, description='Build with OCE')
+ variant('petsc', default=False, description='Build with PETSc')
+ variant('slepc', default=False,
+ description='Build with SLEPc (only when PETSc is enabled)')
depends_on('blas')
depends_on('lapack')
+ depends_on('cmake@2.8:', type='build')
depends_on('gmp')
- depends_on('mpi', when='+mpi')
- depends_on('fltk', when='+fltk') # Assumes OpenGL with GLU is already provided by the system
+ depends_on('mpi', when='+mpi')
+ # Assumes OpenGL with GLU is already provided by the system:
+ depends_on('fltk', when='+fltk')
depends_on('hdf5', when='+hdf5')
- depends_on('zlib', when='+compression')
+ depends_on('oce', when='+oce')
+ depends_on('petsc+mpi', when='+petsc+mpi')
+ depends_on('petsc', when='+petsc~mpi')
+ depends_on('slepc', when='+slepc+petsc')
+ depends_on('zlib', when='+compression')
- def install(self, spec, prefix):
+ def cmake_args(self):
+ spec = self.spec
+ prefix = self.prefix
options = []
- options.extend(std_cmake_args)
- build_directory = join_path(self.stage.path, 'spack-build')
- source_directory = self.stage.source_path
+ # Make sure native file dialogs are used
+ options.extend(['-DENABLE_NATIVE_FILE_CHOOSER=ON'])
options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
# Prevent GMsh from using its own strange directory structure on OSX
options.append('-DENABLE_OS_SPECIFIC_INSTALL=OFF')
+ # Make sure GMSH picks up correct BlasLapack by providing linker flags
+ blas_lapack = spec['lapack'].lapack_libs + spec['blas'].blas_libs
+ options.append(
+ '-DBLAS_LAPACK_LIBRARIES={0}'.format(blas_lapack.ld_flags))
+
+ # Gmsh does not have an option to compile against external metis.
+ # Its own Metis, however, fails to build
+ options.append('-DENABLE_METIS=OFF')
+
+ if '+fltk' in spec:
+ options.append('-DENABLE_FLTK=ON')
+ else:
+ options.append('-DENABLE_FLTK=OFF')
+
+ if '+oce' in spec:
+ env['CASROOT'] = self.spec['oce'].prefix
+ options.extend(['-DENABLE_OCC=ON'])
+ else:
+ options.extend(['-DENABLE_OCC=OFF'])
+
+ if '+petsc' in spec:
+ env['PETSC_DIR'] = self.spec['petsc'].prefix
+ options.extend(['-DENABLE_PETSC=ON'])
+ else:
+ options.extend(['-DENABLE_PETSC=OFF'])
+
+ if '+slepc' in spec:
+ env['SLEPC_DIR'] = self.spec['slepc'].prefix
+ options.extend(['-DENABLE_SLEPC=ON'])
+ else:
+ options.extend(['-DENABLE_SLEPC=OFF'])
+
if '+shared' in spec:
+ # Builds dynamic executable and installs shared library
options.extend(['-DENABLE_BUILD_SHARED:BOOL=ON',
- '-DENABLE_BUILD_DYNAMIC:BOOL=ON']) # Builds dynamic executable and installs shared library
+ '-DENABLE_BUILD_DYNAMIC:BOOL=ON'])
else:
- options.append('-DENABLE_BUILD_LIB:BOOL=ON') # Builds and installs static library
+ # Builds and installs static library
+ options.append('-DENABLE_BUILD_LIB:BOOL=ON')
if '+debug' in spec:
options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
@@ -83,7 +135,4 @@ class Gmsh(Package):
if '+compression' in spec:
options.append('-DENABLE_COMPRESSED_IO:BOOL=ON')
- with working_dir(build_directory, create=True):
- cmake(source_directory, *options)
- make()
- make('install')
+ return options
diff --git a/var/spack/repos/builtin/packages/gnu-prolog/package.py b/var/spack/repos/builtin/packages/gnu-prolog/package.py
new file mode 100644
index 0000000000..1e0487c654
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gnu-prolog/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class GnuProlog(Package):
+ """A free Prolog compiler with constraint solving over finite domains."""
+ homepage = "http://www.gprolog.org/"
+ url = "http://www.gprolog.org/gprolog-1.4.4.tar.gz"
+
+ version('1.4.4', '37009da471e5217ff637ad1c516448c8')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ with working_dir('src'):
+ configure('--with-install-dir=%s' % prefix,
+ '--without-links-dir')
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/gnuplot/package.py b/var/spack/repos/builtin/packages/gnuplot/package.py
index a76677e066..600b6d285f 100644
--- a/var/spack/repos/builtin/packages/gnuplot/package.py
+++ b/var/spack/repos/builtin/packages/gnuplot/package.py
@@ -27,13 +27,18 @@ from spack import *
import os
+
class Gnuplot(Package):
- """
- Gnuplot is a portable command-line driven graphing utility for Linux, OS/2, MS Windows, OSX, VMS, and many other
- platforms. The source code is copyrighted but freely distributed (i.e., you don't have to pay for it). It was
- originally created to allow scientists and students to visualize mathematical functions and data interactively,
- but has grown to support many non-interactive uses such as web scripting. It is also used as a plotting engine by
- third-party applications like Octave. Gnuplot has been supported and under active development since 1986
+ """Gnuplot is a portable command-line driven graphing utility for Linux,
+ OS/2, MS Windows, OSX, VMS, and many other platforms. The source
+ code is copyrighted but freely distributed (i.e., you don't have
+ to pay for it). It was originally created to allow scientists and
+ students to visualize mathematical functions and data
+ interactively, but has grown to support many non-interactive uses
+ such as web scripting. It is also used as a plotting engine by
+ third-party applications like Octave. Gnuplot has been supported
+ and under active development since 1986.
+
"""
homepage = "http://www.gnuplot.info"
url = "http://downloads.sourceforge.net/project/gnuplot/gnuplot/5.0.1/gnuplot-5.0.1.tar.gz"
diff --git a/var/spack/repos/builtin/packages/gnutls/package.py b/var/spack/repos/builtin/packages/gnutls/package.py
index 71b571bc88..5f7b0daf9b 100644
--- a/var/spack/repos/builtin/packages/gnutls/package.py
+++ b/var/spack/repos/builtin/packages/gnutls/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Gnutls(Package):
"""GnuTLS is a secure communications library implementing the SSL,
TLS and DTLS protocols and technologies around them. It
diff --git a/var/spack/repos/builtin/packages/go-bootstrap/package.py b/var/spack/repos/builtin/packages/go-bootstrap/package.py
index b0e2109fd3..b497144f2f 100644
--- a/var/spack/repos/builtin/packages/go-bootstrap/package.py
+++ b/var/spack/repos/builtin/packages/go-bootstrap/package.py
@@ -1,3 +1,27 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
import os
import shutil
import glob
@@ -17,25 +41,41 @@ class GoBootstrap(Package):
extendable = True
- # temporary fix until tags are pulled correctly
- version('1.4.2', git='https://go.googlesource.com/go', tag='go1.4.2')
+ # NOTE: Go@1.4.x is the only supported bootstrapping compiler because all
+ # later versions require a Go compiler to build.
+ # See: https://golang.org/doc/install/source#go14 and
+ # https://github.com/golang/go/issues/17545 and
+ # https://github.com/golang/go/issues/16352
+ version('1.4-bootstrap-20161024', '76e42c8152e8560ded880a6d1d1f53cb',
+ url='https://storage.googleapis.com/golang/go1.4-bootstrap-20161024.tar.gz')
- variant('test',
- default=True,
- description="Run tests as part of build, a good idea but quite"
- " time consuming")
+ variant('test', default=True, description='Build and run tests as part of the build.')
- provides('golang@:1.4.2')
+ provides('golang@:1.4-bootstrap-20161024')
- depends_on('git')
+ depends_on('git', type=('build', 'link', 'run'))
+
+ # NOTE: Older versions of Go attempt to download external files that have
+ # since been moved while running the test suite. This patch modifies the
+ # test files so that these tests don't cause false failures.
+ # See: https://github.com/golang/go/issues/15694
+ @when('@:1.4.3')
+ def patch(self):
+ test_suite_file = FileFilter(join_path('src', 'run.bash'))
+ test_suite_file.filter(
+ r'^(.*)(\$GOROOT/src/cmd/api/run.go)(.*)$',
+ r'# \1\2\3',
+ )
+
+ @when('@1.5.0:')
+ def patch(self):
+ pass
def install(self, spec, prefix):
+ env['CGO_ENABLED'] = '0'
bash = which('bash')
with working_dir('src'):
- if '+test' in spec:
- bash('all.bash')
- else:
- bash('make.bash')
+ bash('{0}.bash'.format('all' if '+test' in spec else 'make'))
try:
os.makedirs(prefix)
@@ -47,5 +87,8 @@ class GoBootstrap(Package):
else:
shutil.copy2(f, os.path.join(prefix, f))
+ def setup_dependent_environment(self, spack_env, run_env, dep_spec):
+ spack_env.set('GOROOT_BOOTSTRAP', self.spec.prefix)
+
def setup_environment(self, spack_env, run_env):
spack_env.set('GOROOT_FINAL', self.spec.prefix)
diff --git a/var/spack/repos/builtin/packages/go/misc-cgo-testcshared.patch b/var/spack/repos/builtin/packages/go/misc-cgo-testcshared.patch
new file mode 100644
index 0000000000..17751df816
--- /dev/null
+++ b/var/spack/repos/builtin/packages/go/misc-cgo-testcshared.patch
@@ -0,0 +1,11 @@
+--- misc/cgo/testcshared/test.bash.orig 2016-11-19 00:00:11.917000000 +0000
++++ misc/cgo/testcshared/test.bash 2016-11-19 00:00:22.081000000 +0000
+@@ -107,7 +107,7 @@
+
+ # test0: exported symbols in shared lib are accessible.
+ # TODO(iant): using _shared here shouldn't really be necessary.
+-$(go env CC) ${GOGCCFLAGS} -I ${installdir} -o testp main0.c libgo.$libext
++$(go env CC) ${GOGCCFLAGS} -I ${installdir} -o testp main0.c ./libgo.$libext
+ binpush testp
+
+ output=$(run LD_LIBRARY_PATH=. ./testp)
diff --git a/var/spack/repos/builtin/packages/go/package.py b/var/spack/repos/builtin/packages/go/package.py
index 13b83517d1..ad1436c0c3 100644
--- a/var/spack/repos/builtin/packages/go/package.py
+++ b/var/spack/repos/builtin/packages/go/package.py
@@ -1,39 +1,97 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
import os
import shutil
import glob
import llnl.util.tty as tty
from spack import *
+# - vanilla CentOS 7, and possibly other systems, fail a test:
+# TestCloneNEWUSERAndRemapRootDisableSetgroups
+#
+# The fix, discussed here: https://github.com/golang/go/issues/16283
+# is to enable "user_namespace".
+#
+# On a Digital Ocean image, this can be achieved by updating
+# `/etc/default/grub` so that the `GRUB_CMDLINE_LINUX` variable
+# includes `user_namespace.enable=1`, re-cooking the grub
+# configuration with `sudo grub2-mkconfig -o /boot/grub2/grub.cfg`,
+# and then rebooting.
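+#
+# A minimal sketch of that grub change (illustrative only; the exact
+# contents of GRUB_CMDLINE_LINUX will differ from system to system):
+#
+#   # in /etc/default/grub, extend the existing kernel command line:
+#   GRUB_CMDLINE_LINUX="... user_namespace.enable=1"
+#   # then regenerate the grub configuration and reboot:
+#   sudo grub2-mkconfig -o /boot/grub2/grub.cfg
+#   sudo reboot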
+#
+# - on CentOS 7 systems (and possibly others) you need to have the
+# glibc package installed or various static cgo tests fail.
+
class Go(Package):
"""The golang compiler and build environment"""
homepage = "https://golang.org"
- url = "https://go.googlesource.com/go"
+ url = 'https://storage.googleapis.com/golang/go1.7.4.src.tar.gz'
extendable = True
- version('1.5.4', git='https://go.googlesource.com/go', tag='go1.5.4')
- version('1.6.2', git='https://go.googlesource.com/go', tag='go1.6.2')
+ version('1.7.4', '49c1076428a5d3b5ad7ac65233fcca2f')
+ version('1.6.4', 'b023240be707b34059d2c114d3465c92')
- variant('test',
- default=True,
- description="Run tests as part of build, a good idea but quite"
- " time consuming")
+ variant('test', default=True, description='Build and run tests as part of the build.')
provides('golang')
- # to-do, make non-c self-hosting compilers feasible without backflips
+ depends_on('git', type=('build', 'link', 'run'))
+ # TODO: Make non-c self-hosting compilers feasible without backflips
# should be a dep on external go compiler
- depends_on('go-bootstrap')
- depends_on('git')
+ depends_on('go-bootstrap', type='build')
+
+ # https://github.com/golang/go/issues/17545
+ patch('time_test.patch', when='@1.6.4:1.7.4')
+
+ # https://github.com/golang/go/issues/17986
+ patch('misc-cgo-testcshared.patch', level=0, when='@1.6.4:1.7.4')
+
+ # NOTE: Older versions of Go attempt to download external files that have
+ # since been moved while running the test suite. This patch modifies the
+ # test files so that these tests don't cause false failures.
+ # See: https://github.com/golang/go/issues/15694
+ @when('@:1.4.3')
+ def patch(self):
+ test_suite_file = FileFilter(join_path('src', 'run.bash'))
+ test_suite_file.filter(
+ r'^(.*)(\$GOROOT/src/cmd/api/run.go)(.*)$',
+ r'# \1\2\3',
+ )
+
+ @when('@1.5.0:')
+ def patch(self):
+ pass
+
+ def url_for_version(self, version):
+ return "https://storage.googleapis.com/golang/go{0}.src.tar.gz".format(version)
def install(self, spec, prefix):
bash = which('bash')
with working_dir('src'):
- if '+test' in spec:
- bash('all.bash')
- else:
- bash('make.bash')
+ bash('{0}.bash'.format('all' if '+test' in spec else 'make'))
try:
os.makedirs(prefix)
@@ -47,7 +105,6 @@ class Go(Package):
def setup_environment(self, spack_env, run_env):
spack_env.set('GOROOT_FINAL', self.spec.prefix)
- spack_env.set('GOROOT_BOOTSTRAP', self.spec['go-bootstrap'].prefix)
def setup_dependent_package(self, module, ext_spec):
"""Called before go modules' install() methods.
diff --git a/var/spack/repos/builtin/packages/go/time_test.patch b/var/spack/repos/builtin/packages/go/time_test.patch
new file mode 100644
index 0000000000..c3e0697c91
--- /dev/null
+++ b/var/spack/repos/builtin/packages/go/time_test.patch
@@ -0,0 +1,18 @@
+diff --git a/src/time/time_test.go b/src/time/time_test.go
+index 68236fd..2e47d08 100644
+--- a/src/time/time_test.go
++++ b/src/time/time_test.go
+@@ -943,8 +943,11 @@ func TestLoadFixed(t *testing.T) {
+ // but Go and most other systems use "east is positive".
+ // So GMT+1 corresponds to -3600 in the Go zone, not +3600.
+ name, offset := Now().In(loc).Zone()
+- if name != "GMT+1" || offset != -1*60*60 {
+- t.Errorf("Now().In(loc).Zone() = %q, %d, want %q, %d", name, offset, "GMT+1", -1*60*60)
++ // The zone abbreviation is "-01" since tzdata-2016g, and "GMT+1"
++ // on earlier versions; we accept both. (Issue #17276).
++ if !(name == "GMT+1" || name == "-01") || offset != -1*60*60 {
++ t.Errorf("Now().In(loc).Zone() = %q, %d, want %q or %q, %d",
++ name, offset, "GMT+1", "-01", -1*60*60)
+ }
+ }
+
diff --git a/var/spack/repos/builtin/packages/gobject-introspection/package.py b/var/spack/repos/builtin/packages/gobject-introspection/package.py
new file mode 100644
index 0000000000..952ec21661
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gobject-introspection/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class GobjectIntrospection(Package):
+ """The GObject Introspection is used to describe the program APIs and
+ collect them in a uniform, machine readable format.Cairo is a 2D graphics
+ library with support for multiple output"""
+
+ homepage = "https://wiki.gnome.org/Projects/GObjectIntrospection"
+ url = "http://ftp.gnome.org/pub/gnome/sources/gobject-introspection/1.48/gobject-introspection-1.48.0.tar.xz"
+
+ version('1.48.0', '01301fa9019667d48e927353e08bc218')
+
+ # version 1.48.0 build fails with glib 2.49.4
+ depends_on("glib@2.48.1")
+ depends_on("python")
+ depends_on("cairo")
+ depends_on("bison", type="build")
+ depends_on("flex", type="build")
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ # we need to filter this file to avoid an overly long hashbang line
+ filter_file('@PYTHON@', 'python',
+ 'tools/g-ir-tool-template.in')
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/googletest/package.py b/var/spack/repos/builtin/packages/googletest/package.py
index 9444253c8c..6f3cafec06 100644
--- a/var/spack/repos/builtin/packages/googletest/package.py
+++ b/var/spack/repos/builtin/packages/googletest/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Googletest(Package):
"""Google test framework for C++. Also called gtest."""
homepage = "https://github.com/google/googletest"
@@ -31,7 +32,7 @@ class Googletest(Package):
version('1.7.0', '5eaf03ed925a47b37c8e1d559eb19bc4')
- depends_on("cmake")
+ depends_on("cmake", type='build')
def install(self, spec, prefix):
which('cmake')('.', *std_cmake_args)
@@ -40,9 +41,8 @@ class Googletest(Package):
# Google Test doesn't have a make install
# We have to do our own install here.
- install_tree('include', prefix.include)
+ install_tree('include', prefix.include)
mkdirp(prefix.lib)
- install('./libgtest.a', '%s' % prefix.lib)
- install('./libgtest_main.a', '%s' % prefix.lib)
-
+ install('./libgtest.a', '%s' % prefix.lib)
+ install('./libgtest_main.a', '%s' % prefix.lib)
diff --git a/var/spack/repos/builtin/packages/gource/package.py b/var/spack/repos/builtin/packages/gource/package.py
new file mode 100644
index 0000000000..dda00420a3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gource/package.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gource(Package):
+ """Software version control visualization."""
+
+ homepage = "http://gource.io"
+ url = "https://github.com/acaudwell/Gource/releases/download/gource-0.44/gource-0.44.tar.gz"
+
+ version('0.44', '79cda1bfaad16027d59cce55455bfab88b57c69d')
+
+ depends_on('automake', type='build')
+ depends_on('autoconf', type='build')
+ depends_on('libtool', type='build')
+ depends_on('glm', type='build')
+ depends_on('pkg-config', type='build')
+
+ depends_on('freetype@2.0:')
+ depends_on('pcre')
+ depends_on('boost@1.46:+filesystem+system')
+ depends_on('glew')
+ depends_on('jpeg')
+ depends_on('libpng')
+ depends_on('sdl2')
+ depends_on('sdl2-image')
+
+ def install(self, spec, prefix):
+ config_args = ['--prefix=%s' % prefix,
+ '--disable-dependency-tracking',
+ '--without-x',
+ '--with-boost=%s' % spec['boost'].prefix]
+
+ autoreconf('-i')
+ configure(*config_args)
+ make()
+
+ make("install",
+ parallel=False)
diff --git a/var/spack/repos/builtin/packages/gperf/package.py b/var/spack/repos/builtin/packages/gperf/package.py
index af176afcf6..0ae07b33fc 100644
--- a/var/spack/repos/builtin/packages/gperf/package.py
+++ b/var/spack/repos/builtin/packages/gperf/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Gperf(Package):
"""GNU gperf is a perfect hash function generator. For a given
list of strings, it produces a hash function and hash table, in
@@ -38,6 +39,8 @@ class Gperf(Package):
version('3.0.4', 'c1f1db32fb6598d6a93e6e88796a8632')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
+
make()
- make("install")
+ # make('check') # fails tests
+ make('install')
diff --git a/var/spack/repos/builtin/packages/gperftools/package.py b/var/spack/repos/builtin/packages/gperftools/package.py
index 1f17ab71a4..c6ca6c8057 100644
--- a/var/spack/repos/builtin/packages/gperftools/package.py
+++ b/var/spack/repos/builtin/packages/gperftools/package.py
@@ -24,14 +24,20 @@
##############################################################################
from spack import *
+
class Gperftools(Package):
- """Google's fast malloc/free implementation, especially for multi-threaded applications.
- Contains tcmalloc, heap-checker, heap-profiler, and cpu-profiler."""
+ """Google's fast malloc/free implementation, especially for
+ multi-threaded applications. Contains tcmalloc, heap-checker,
+ heap-profiler, and cpu-profiler.
+
+ """
homepage = "https://code.google.com/p/gperftools"
url = "https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz"
- version('2.4', '2171cea3bbe053036fb5d5d25176a160', url="https://github.com/gperftools/gperftools/releases/download/gperftools-2.4/gperftools-2.4.tar.gz")
- version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90', url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz")
+ version('2.4', '2171cea3bbe053036fb5d5d25176a160',
+ url="https://github.com/gperftools/gperftools/releases/download/gperftools-2.4/gperftools-2.4.tar.gz")
+ version('2.3', 'f54dd119f0e46ac1f13264f8d97adf90',
+ url="https://googledrive.com/host/0B6NtGsLhIcf7MWxMMF9JdTN3UVk/gperftools-2.3.tar.gz")
depends_on("libunwind")
diff --git a/var/spack/repos/builtin/packages/grackle/Make.mach.template b/var/spack/repos/builtin/packages/grackle/Make.mach.template
new file mode 100644
index 0000000000..83abaa26d1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/grackle/Make.mach.template
@@ -0,0 +1,71 @@
+MACH_TEXT = Generic Linux
+MACH_VALID = 1
+MACH_FILE = Make.mach.@ARCHITECTURE
+
+#-----------------------------------------------------------------------
+# Install paths (local variables)
+#-----------------------------------------------------------------------
+
+LOCAL_HDF5_INSTALL = @HDF5_ROOT
+
+#-----------------------------------------------------------------------
+# Compiler settings
+#-----------------------------------------------------------------------
+
+MACH_CC_NOMPI = @CC # C compiler
+MACH_CXX_NOMPI = @CXX # C++ compiler
+MACH_FC_NOMPI = @F77 # Fortran 77
+MACH_F90_NOMPI = @FC # Fortran 90
+MACH_LD_NOMPI = @FC # Linker
+@LINK_VARIABLES_DEFINITION
+
+#-----------------------------------------------------------------------
+# Machine-dependent defines
+#-----------------------------------------------------------------------
+
+MACH_DEFINES = -DLINUX -DH5_USE_16_API -fPIC
+
+#-----------------------------------------------------------------------
+# Compiler flag settings
+#-----------------------------------------------------------------------
+
+MACH_CPPFLAGS = -P -traditional
+MACH_CFLAGS =
+MACH_CXXFLAGS =
+MACH_FFLAGS = -fno-second-underscore -ffixed-line-length-132
+MACH_F90FLAGS = -fno-second-underscore
+MACH_LDFLAGS = @STDCXX_LIB
+
+#-----------------------------------------------------------------------
+# Optimization flags
+#-----------------------------------------------------------------------
+
+MACH_OPT_WARN = -Wall -g
+MACH_OPT_DEBUG = -g
+MACH_OPT_HIGH = -O2
+MACH_OPT_AGGRESSIVE = -O3 -g
+
+#-----------------------------------------------------------------------
+# Includes
+#-----------------------------------------------------------------------
+
+LOCAL_INCLUDES_HDF5 = -I@HDF5_ROOT/include # HDF5 includes
+
+MACH_INCLUDES = $(LOCAL_INCLUDES_HDF5)
+
+#-----------------------------------------------------------------------
+# Libraries
+#-----------------------------------------------------------------------
+
+LOCAL_LIBS_HDF5 = -L@HDF5_ROOT/lib -lhdf5 # HDF5 libraries
+LOCAL_LIBS_MACH = # Machine-dependent libraries
+
+MACH_LIBS = $(LOCAL_LIBS_HDF5) $(LOCAL_LIBS_MACH)
+
+#-----------------------------------------------------------------------
+# Installation
+#-----------------------------------------------------------------------
+
+MACH_INSTALL_PREFIX = @PREFIX
+MACH_INSTALL_LIB_DIR =
+MACH_INSTALL_INCLUDE_DIR =
diff --git a/var/spack/repos/builtin/packages/grackle/package.py b/var/spack/repos/builtin/packages/grackle/package.py
new file mode 100644
index 0000000000..7e3777158f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/grackle/package.py
@@ -0,0 +1,89 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os.path
+import shutil
+import inspect
+
+from spack import *
+
+
+class Grackle(Package):
+ """Grackle is a chemistry and radiative cooling library for astrophysical
+ simulations with interfaces for C, C++, and Fortran codes. It is a
+ generalized and trimmed down version of the chemistry network of the Enzo
+ simulation code.
+ """
+ homepage = 'http://grackle.readthedocs.io/en/grackle-2.2/'
+ url = 'https://bitbucket.org/grackle/grackle/get/grackle-2.0.1.tar.bz2'
+
+ version('2.2', 'ec49ed1db5a42db21f478285150c2ba3')
+ version('2.0.1', 'a9624ad13a60c592c1a0a4ea8e1ae86d')
+
+ depends_on('libtool', when='@2.2')
+
+ depends_on('mpi')
+ depends_on('hdf5+mpi')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ template_name = '{0.architecture}-{0.compiler.name}'
+ grackle_architecture = template_name.format(spec)
+ link_variables = 'MACH_AR = ar' if spec.version < Version('2.2') else 'MACH_LIBTOOL = libtool' # NOQA: ignore=E501
+ substitutions = {
+ '@ARCHITECTURE': grackle_architecture,
+ '@CC': spec['mpi'].mpicc,
+ '@CXX': spec['mpi'].mpicxx,
+ '@FC': spec['mpi'].mpifc,
+ '@F77': spec['mpi'].mpif77,
+ '@STDCXX_LIB': ' '.join(self.compiler.stdcxx_libs),
+ '@HDF5_ROOT': spec['hdf5'].prefix,
+ '@PREFIX': prefix,
+ '@LINK_VARIABLES_DEFINITION': link_variables
+ }
+
+ template = join_path(
+ os.path.dirname(inspect.getmodule(self).__file__),
+ 'Make.mach.template'
+ )
+ makefile = join_path(
+ self.stage.source_path,
+ 'src',
+ 'clib',
+ 'Make.mach.{0}'.format(grackle_architecture)
+ )
+ shutil.copy(template, makefile)
+ for key, value in substitutions.items():
+ filter_file(key, value, makefile)
+
+ configure()
+ with working_dir('src/clib'):
+ make('clean')
+ make('machine-{0}'.format(grackle_architecture))
+ make('opt-high')
+ make('show-config')
+ make()
+ mkdirp(prefix.lib)
+ make('install')
diff --git a/var/spack/repos/builtin/packages/grandr/package.py b/var/spack/repos/builtin/packages/grandr/package.py
new file mode 100644
index 0000000000..8097d4fa01
--- /dev/null
+++ b/var/spack/repos/builtin/packages/grandr/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Grandr(Package):
+ """RandR user interface using GTK+ libraries."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/app/grandr"
+ url = "https://www.x.org/archive/individual/app/grandr-0.1.tar.gz"
+
+ version('0.1', '707109a105f2ab1bb216e6e6a5a10ba4')
+
+ depends_on('gtkplus@2.0.0:')
+ depends_on('gconf')
+ depends_on('xrandr@1.2:')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/graphlib/package.py b/var/spack/repos/builtin/packages/graphlib/package.py
index f70f32cc8a..1e0eb2bf3b 100644
--- a/var/spack/repos/builtin/packages/graphlib/package.py
+++ b/var/spack/repos/builtin/packages/graphlib/package.py
@@ -24,12 +24,16 @@
##############################################################################
from spack import *
+
class Graphlib(Package):
"""Library to create, manipulate, and export graphs Graphlib."""
- homepage = "http://https://github.com/lee218llnl/graphlib"
- url = "https://github.com/lee218llnl/graphlib/archive/v2.0.0.tar.gz"
+ homepage = "https://github.com/LLNL/graphlib"
+ url = "https://github.com/LLNL/graphlib/archive/v2.0.0.tar.gz"
version('2.0.0', '43c6df84f1d38ba5a5dce0ae19371a70')
+ version('3.0.0', '625d199f97ab1b84cbc8daabcaee5e2a')
+
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/graphviz/package.py b/var/spack/repos/builtin/packages/graphviz/package.py
index 2f99015ba2..1bf6c70926 100644
--- a/var/spack/repos/builtin/packages/graphviz/package.py
+++ b/var/spack/repos/builtin/packages/graphviz/package.py
@@ -24,9 +24,10 @@
##############################################################################
from spack import *
import sys
+import shutil
-class Graphviz(Package):
+class Graphviz(AutotoolsPackage):
"""Graph Visualization Software"""
homepage = "http://www.graphviz.org"
url = "http://www.graphviz.org/pub/graphviz/stable/SOURCES/graphviz-2.38.0.tar.gz"
@@ -37,18 +38,23 @@ class Graphviz(Package):
# related to missing Perl packages. If spack begins support for Perl in the
# future, this package can be updated to depend_on('perl') and the
# necessary devel packages.
- variant('perl', default=False, description='Enable if you need the optional Perl language bindings.') # NOQA: ignore=E501
+ variant(
+ 'perl', default=False,
+ description='Enable if you need the optional Perl language bindings.')
parallel = False
depends_on("swig")
depends_on("python")
depends_on("ghostscript")
- depends_on("pkg-config")
+ depends_on("freetype")
+ depends_on("expat")
+ depends_on("libtool")
+ depends_on("pkg-config", type='build')
- def install(self, spec, prefix):
- options = ['--prefix=%s' % prefix]
- if '+perl' not in spec:
+ def configure_args(self):
+ options = []
+ if '+perl' not in self.spec:
options.append('--disable-perl')
# On OSX fix the compiler error:
@@ -58,6 +64,7 @@ class Graphviz(Package):
if sys.platform == 'darwin':
options.append('CFLAGS=-I/opt/X11/include')
- configure(*options)
- make()
- make("install")
+ # A hack to patch config.guess in the libltdl subdirectory
+ shutil.copyfile('./config/config.guess', 'libltdl/config/config.guess')
+
+ return options
diff --git a/var/spack/repos/builtin/packages/grib-api/package.py b/var/spack/repos/builtin/packages/grib-api/package.py
new file mode 100644
index 0000000000..8b81e14a27
--- /dev/null
+++ b/var/spack/repos/builtin/packages/grib-api/package.py
@@ -0,0 +1,80 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class GribApi(Package):
+ """The ECMWF GRIB API is an application program interface accessible from
+ C, FORTRAN and Python programs developed for encoding and decoding WMO
+ FM-92 GRIB edition 1 and edition 2 messages."""
+
+ homepage = "https://software.ecmwf.int/wiki/display/GRIB/Home"
+ url = "https://software.ecmwf.int/wiki/download/attachments/3473437/grib_api-1.17.0-Source.tar.gz"
+
+ version('1.17.0', 'bca7114d2c3100501a08190a146818d2')
+ version('1.16.0', '8c7fdee03344e4379d400ae20976a460')
+
+ variant('netcdf', default=False, description='Enable netcdf encoding/decoding using netcdf library')
+ variant('jpeg', default=True, description='Enable jpeg 2000 for grib 2 decoding/encoding')
+ variant('png', default=False, description='Enable png for decoding/encoding')
+
+ depends_on('cmake', type='build')
+ depends_on('libpng', when='+png')
+ depends_on('netcdf', when='+netcdf')
+ depends_on('jasper', when='+jpeg')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+ options.append('-DBUILD_SHARED_LIBS=BOTH')
+
+ # We will add python support later.
+ options.append('-DENABLE_PYTHON=OFF')
+
+ # Disable FORTRAN interface if we don't have it.
+ if (self.compiler.f77 is None) or (self.compiler.fc is None):
+ options.append('-DENABLE_FORTRAN=OFF')
+
+ if '+netcdf' in spec:
+ options.append('-DENABLE_NETCDF=ON')
+ options.append('-DNETCDF_PATH=%s' % spec['netcdf'].prefix)
+ else:
+ options.append('-DENABLE_NETCDF=OFF')
+
+ if '+jpeg' in spec:
+ options.append('-DENABLE_JPG=ON')
+ options.append('-DJASPER_PATH=%s' % spec['jasper'].prefix)
+ else:
+ options.append('-DENABLE_JPG=OFF')
+
+ if '+png' in spec:
+ options.append('-DENABLE_PNG=ON')
+ else:
+ options.append('-DENABLE_PNG=OFF')
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/gromacs/package.py b/var/spack/repos/builtin/packages/gromacs/package.py
index 1a50a42488..d079188db6 100644
--- a/var/spack/repos/builtin/packages/gromacs/package.py
+++ b/var/spack/repos/builtin/packages/gromacs/package.py
@@ -25,16 +25,17 @@
from spack import *
-class Gromacs(Package):
- """
- GROMACS (GROningen MAchine for Chemical Simulations) is a molecular dynamics package primarily designed for
- simulations of proteins, lipids and nucleic acids. It was originally developed in the Biophysical Chemistry
- department of University of Groningen, and is now maintained by contributors in universities and research centers
- across the world.
-
- GROMACS is one of the fastest and most popular software packages available and can run on CPUs as well as GPUs.
- It is free, open source released under the GNU General Public License. Starting from version 4.6, GROMACS is
- released under the GNU Lesser General Public License.
+class Gromacs(CMakePackage):
+ """GROMACS (GROningen MAchine for Chemical Simulations) is a molecular
+ dynamics package primarily designed for simulations of proteins, lipids
+ and nucleic acids. It was originally developed in the Biophysical
+ Chemistry department of University of Groningen, and is now maintained
+ by contributors in universities and research centers across the world.
+
+ GROMACS is one of the fastest and most popular software packages
+ available and can run on CPUs as well as GPUs. It is free, open-source
+ software released under the GNU General Public License. Starting from
+ version 4.6, GROMACS is released under the GNU Lesser General Public
+ License.
"""
homepage = 'http://www.gromacs.org'
@@ -43,38 +44,42 @@ class Gromacs(Package):
version('5.1.2', '614d0be372f1a6f1f36382b7a6fcab98')
variant('mpi', default=True, description='Activate MPI support')
- variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
variant('debug', default=False, description='Enables debug mode')
- variant('double', default=False, description='Produces a double precision version of the executables')
+ variant(
+ 'double', default=False,
+ description='Produces a double precision version of the executables')
+ variant('plumed', default=False, description='Enable PLUMED support')
depends_on('mpi', when='+mpi')
-
+ depends_on('plumed+mpi', when='+plumed+mpi')
+ depends_on('plumed~mpi', when='+plumed~mpi')
depends_on('fftw')
+ depends_on('cmake@2.8.8:', type='build')
# TODO : add GPU support
- def install(self, spec, prefix):
+ def patch(self):
+ if '+plumed' in self.spec:
+ self.spec['plumed'].package.apply_patch(self)
+
+ def cmake_args(self):
options = []
- if '+mpi' in spec:
+ if '+mpi' in self.spec:
options.append('-DGMX_MPI:BOOL=ON')
- if '+double' in spec:
+ if '+double' in self.spec:
options.append('-DGMX_DOUBLE:BOOL=ON')
- if '~shared' in spec:
+ if '~shared' in self.spec:
options.append('-DBUILD_SHARED_LIBS:BOOL=OFF')
- if '+debug' in spec:
+ if '+debug' in self.spec:
options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
else:
options.append('-DCMAKE_BUILD_TYPE:STRING=Release')
- options.extend(std_cmake_args)
-
- with working_dir('spack-build', create=True):
-
- cmake('..', *options)
- make()
- make('install')
+ return options
diff --git a/var/spack/repos/builtin/packages/gsl/package.py b/var/spack/repos/builtin/packages/gsl/package.py
index c1695a6f02..f13a9a66e8 100644
--- a/var/spack/repos/builtin/packages/gsl/package.py
+++ b/var/spack/repos/builtin/packages/gsl/package.py
@@ -26,21 +26,18 @@
from spack import *
-class Gsl(Package):
- """
- The GNU Scientific Library (GSL) is a numerical library for C and C++ programmers. It is free software under the
- GNU General Public License. The library provides a wide range of mathematical routines such as random number
- generators, special functions and least-squares fitting. There are over 1000 functions in total with an extensive
- test suite.
- """
- homepage = "http://www.gnu.org/software/gsl"
- url = "http://mirror.switch.ch/ftp/mirror/gnu/gsl/gsl-2.1.tar.gz"
+class Gsl(AutotoolsPackage):
+ """The GNU Scientific Library (GSL) is a numerical library for C and C++
+ programmers. It is free software under the GNU General Public License. The
+ library provides a wide range of mathematical routines such as random
+ number generators, special functions and least-squares fitting. There are
+ over 1000 functions in total with an extensive test suite."""
- version('2.1' , 'd8f70abafd3e9f0bae03c52d1f4e8de5')
- version('2.0' , 'ae44cdfed78ece40e73411b63a78c375')
- version('1.16', 'e49a664db13d81c968415cd53f62bc8b')
+ homepage = "http://www.gnu.org/software/gsl"
+ url = "http://mirror.switch.ch/ftp/mirror/gnu/gsl/gsl-2.3.tar.gz"
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
- make()
- make("install")
+ version('2.3', '905fcbbb97bc552d1037e34d200931a0')
+ version('2.2.1', '3d90650b7cfe0a6f4b29c2d7b0f86458')
+ version('2.1', 'd8f70abafd3e9f0bae03c52d1f4e8de5')
+ version('2.0', 'ae44cdfed78ece40e73411b63a78c375')
+ version('1.16', 'e49a664db13d81c968415cd53f62bc8b')
diff --git a/var/spack/repos/builtin/packages/gtkplus/package.py b/var/spack/repos/builtin/packages/gtkplus/package.py
index c135e89f78..b53b688372 100644
--- a/var/spack/repos/builtin/packages/gtkplus/package.py
+++ b/var/spack/repos/builtin/packages/gtkplus/package.py
@@ -24,16 +24,24 @@
##############################################################################
from spack import *
+
class Gtkplus(Package):
- """The GTK+ 2 package contains libraries used for creating graphical user interfaces for applications."""
+ """The GTK+ 2 package contains libraries used for creating graphical user
+ interfaces for applications."""
homepage = "http://www.gtk.org"
+ url = "http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.31.tar.xz"
+
+ version('2.24.31', '68c1922732c7efc08df4656a5366dcc3afdc8791513400dac276009b40954658')
+ version('2.24.25', '38af1020cb8ff3d10dda2c8807f11e92af9d2fa4045de61c62eedb7fbc7ea5b3')
- version('2.24.25', '612350704dd3aacb95355a4981930c6f',
- url="http://ftp.gnome.org/pub/gnome/sources/gtk+/2.24/gtk+-2.24.25.tar.xz")
+ variant('X', default=False, description="Enable an X toolkit")
depends_on("atk")
depends_on("gdk-pixbuf")
+ depends_on("glib")
depends_on("pango")
+ depends_on("pango~X", when='~X')
+ depends_on("pango+X", when='+X')
def patch(self):
# remove disable deprecated flag.
diff --git a/var/spack/repos/builtin/packages/gts/package.py b/var/spack/repos/builtin/packages/gts/package.py
new file mode 100644
index 0000000000..2b3d4dd4f8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/gts/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Gts(Package):
+ """GTS stands for the GNU Triangulated Surface Library.
+
+ It is an Open Source Free Software Library intended to provide a set of
+ useful functions to deal with 3D surfaces meshed with interconnected
+ triangles. The source code is available free of charge under the Free
+ Software LGPL license.
+
+ The code is written entirely in C with an object-oriented approach
+ based mostly on the design of GTK+. Careful attention is paid to
+ performance related issues as the initial goal of GTS is to provide a
+ simple and efficient library to scientists dealing with 3D computational
+ surface meshes.
+ """
+
+ homepage = "http://gts.sourceforge.net/index.html"
+ url = "http://gts.sourceforge.net/tarballs/gts-snapshot-121130.tar.gz"
+
+ version('121130', '023ebb6b13b8707534182a3ef0d12908')
+
+ depends_on('glib')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/guile/package.py b/var/spack/repos/builtin/packages/guile/package.py
new file mode 100644
index 0000000000..22aff1bddf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/guile/package.py
@@ -0,0 +1,68 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Guile(Package):
+ """Guile is the GNU Ubiquitous Intelligent Language for Extensions,
+ the official extension language for the GNU operating system."""
+
+ homepage = "https://www.gnu.org/software/guile/"
+ url = "ftp://ftp.gnu.org/gnu/guile/guile-2.0.11.tar.gz"
+
+ version('2.0.11', 'e532c68c6f17822561e3001136635ddd')
+
+ variant('readline', default=True, description='Use the readline library')
+
+ depends_on('gmp@4.2:')
+ depends_on('gettext')
+ depends_on('libtool@1.5.6:')
+ depends_on('libunistring@0.9.3:')
+ depends_on('bdw-gc@7.0:')
+ depends_on('libffi')
+ depends_on('readline', when='+readline')
+ depends_on('pkg-config', type='build')
+
+ def install(self, spec, prefix):
+ config_args = [
+ '--prefix={0}'.format(prefix),
+ '--with-libunistring-prefix={0}'.format(
+ spec['libunistring'].prefix),
+ '--with-libltdl-prefix={0}'.format(spec['libtool'].prefix),
+ '--with-libgmp-prefix={0}'.format(spec['gmp'].prefix),
+ '--with-libintl-prefix={0}'.format(spec['gettext'].prefix)
+ ]
+
+ if '+readline' in spec:
+ config_args.append('--with-libreadline-prefix={0}'.format(
+ spec['readline'].prefix))
+ else:
+ config_args.append('--without-libreadline-prefix')
+
+ configure(*config_args)
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/h5hut/package.py b/var/spack/repos/builtin/packages/h5hut/package.py
new file mode 100644
index 0000000000..22146372dc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/h5hut/package.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class H5hut(AutotoolsPackage):
+ """H5hut (HDF5 Utility Toolkit).
+ High-Performance I/O Library for Particle-based Simulations."""
+
+ homepage = "https://amas.psi.ch/H5hut/"
+ url = "https://amas.psi.ch/H5hut/raw-attachment/wiki/DownloadSources/H5hut-1.99.13.tar.gz"
+
+ version('1.99.13', '2a07a449afe50534de006ac6954a421a')
+
+ variant('fortran', default=True, description='Enable Fortran support')
+ variant('mpi', default=True, description='Enable MPI support')
+
+ depends_on('mpi', when='+mpi')
+ # h5hut +mpi uses the obsolete function H5Pset_fapl_mpiposix:
+ depends_on('hdf5@1.8:1.8.12+mpi', when='+mpi')
+ depends_on('hdf5@1.8:', when='~mpi')
+
+ # If built in parallel, the following error message occurs:
+ # install: .libs/libH5hut.a: No such file or directory
+ parallel = False
+
+ @AutotoolsPackage.precondition('configure')
+ def validate(self):
+ """Checks if Fortran compiler is available."""
+
+ if '+fortran' in self.spec and not self.compiler.fc:
+ raise RuntimeError(
+ 'Cannot build Fortran variant without a Fortran compiler.')
+
+ def configure_args(self):
+ spec = self.spec
+ config_args = ['--enable-shared']
+
+ if '+fortran' in spec:
+ config_args.append('--enable-fortran')
+
+ if '+mpi' in spec:
+ config_args.extend([
+ '--enable-parallel',
+ 'CC={0}'.format(spec['mpi'].mpicc),
+ 'CXX={0}'.format(spec['mpi'].mpicxx)
+ ])
+
+ if '+fortran' in spec:
+ config_args.append('FC={0}'.format(spec['mpi'].mpifc))
+
+ return config_args
diff --git a/var/spack/repos/builtin/packages/hadoop/package.py b/var/spack/repos/builtin/packages/hadoop/package.py
new file mode 100644
index 0000000000..a87b19a8cc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hadoop/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Hadoop(Package):
+ """The Apache Hadoop software library is a framework that
+ allows for the distributed processing of large data sets
+ across clusters of computers using simple programming models.
+ """
+
+ homepage = "http://hadoop.apache.org/"
+ url = "http://mirrors.ocf.berkeley.edu/apache/hadoop/common/hadoop-2.6.4/hadoop-2.6.4.tar.gz"
+
+ version('2.6.4', '37019f13d7dcd819727be158440b9442')
+
+ depends_on('jdk', type='run')
+
+ def install(self, spec, prefix):
+
+ def install_dir(dirname):
+ install_tree(dirname, join_path(prefix, dirname))
+
+ install_dir('bin')
+ install_dir('etc')
+ install_dir('include')
+ install_dir('lib')
+ install_dir('libexec')
+ install_dir('sbin')
+ install_dir('share')
diff --git a/var/spack/repos/builtin/packages/harfbuzz/package.py b/var/spack/repos/builtin/packages/harfbuzz/package.py
index e4c1b80de3..7c98c2a96a 100644
--- a/var/spack/repos/builtin/packages/harfbuzz/package.py
+++ b/var/spack/repos/builtin/packages/harfbuzz/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Harfbuzz(Package):
"""The Harfbuzz package contains an OpenType text shaping engine."""
homepage = "http://www.freedesktop.org/wiki/Software/HarfBuzz/"
@@ -31,9 +32,12 @@ class Harfbuzz(Package):
version('0.9.37', 'bfe733250e34629a188d82e3b971bc1e')
+ depends_on("pkg-config", type="build")
depends_on("glib")
- depends_on("icu")
+ depends_on("icu4c")
depends_on("freetype")
+ depends_on("cairo")
+ depends_on("zlib")
def patch(self):
change_sed_delimiter('@', ';', 'src/Makefile.in')
diff --git a/var/spack/repos/builtin/packages/harminv/package.py b/var/spack/repos/builtin/packages/harminv/package.py
new file mode 100644
index 0000000000..184535ebb0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/harminv/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Harminv(Package):
+ """Harminv is a free program (and accompanying library) to solve the
+ problem of harmonic inversion - given a discrete-time, finite-length
+ signal that consists of a sum of finitely-many sinusoids (possibly
+ exponentially decaying) in a given bandwidth, it determines the
+ frequencies, decay constants, amplitudes, and phases of those sinusoids."""
+
+ homepage = "http://ab-initio.mit.edu/wiki/index.php/Harminv"
+ url = "http://ab-initio.mit.edu/harminv/harminv-1.4.tar.gz"
+
+ version('1.4', 'b95e24a9bc7e07d3d2202d1605e9e86f')
+
+ depends_on('blas')
+ depends_on('lapack')
+
+ def install(self, spec, prefix):
+ config_args = [
+ '--prefix={0}'.format(prefix),
+ '--with-blas={0}'.format(spec['blas'].prefix.lib),
+ '--with-lapack={0}'.format(spec['lapack'].prefix.lib),
+ '--enable-shared'
+ ]
+
+ configure(*config_args)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/hdf/package.py b/var/spack/repos/builtin/packages/hdf/package.py
index 7ad4df2fde..2554bd0f96 100644
--- a/var/spack/repos/builtin/packages/hdf/package.py
+++ b/var/spack/repos/builtin/packages/hdf/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Hdf(Package):
"""HDF4 (also known as HDF) is a library and multi-object
file format for storing and managing data between machines."""
@@ -33,37 +34,42 @@ class Hdf(Package):
list_url = "https://www.hdfgroup.org/ftp/HDF/releases/"
list_depth = 3
+ version('4.2.12', '79fd1454c899c05e34a3da0456ab0c1c')
version('4.2.11', '063f9928f3a19cc21367b71c3b8bbf19')
variant('szip', default=False, description="Enable szip support")
- depends_on("jpeg")
- depends_on("szip", when='+szip')
- depends_on("zlib")
-
-
- def url_for_version(self, version):
- return "https://www.hdfgroup.org/ftp/HDF/releases/HDF" + str(version) + "/src/hdf-" + str(version) + ".tar.gz"
+ depends_on('jpeg@6b:')
+ depends_on('szip', when='+szip')
+ depends_on('zlib@1.1.4:')
+ depends_on('bison', type='build')
+ depends_on('flex', type='build')
def install(self, spec, prefix):
config_args = [
'CFLAGS=-fPIC',
- '--prefix=%s' % prefix,
- '--with-jpeg=%s' % spec['jpeg'].prefix,
- '--with-zlib=%s' % spec['zlib'].prefix,
- '--disable-netcdf', # must be disabled to build NetCDF with HDF4 support
+ '--prefix={0}'.format(prefix),
+ '--with-jpeg={0}'.format(spec['jpeg'].prefix),
+ '--with-zlib={0}'.format(spec['zlib'].prefix),
+ '--disable-netcdf', # must be disabled to build NetCDF with HDF4
'--enable-fortran',
- '--disable-shared', # fortran and shared libraries are not compatible
+ '--disable-shared', # fortran and shared libs are not compatible
'--enable-static',
'--enable-production'
]
- # SZip support
+ # Szip support
if '+szip' in spec:
- config_args.append('--with-szlib=%s' % spec['szip'].prefix)
+ config_args.append('--with-szlib={0}'.format(spec['szip'].prefix))
+ else:
+ config_args.append('--without-szlib')
configure(*config_args)
make()
- make("install")
+
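+ # Run the HDF4 test suite only when tests were requested for this build.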
+ if self.run_tests:
+ make('check')
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/hdf5-blosc/package.py b/var/spack/repos/builtin/packages/hdf5-blosc/package.py
new file mode 100644
index 0000000000..088c1e9d9b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hdf5-blosc/package.py
@@ -0,0 +1,212 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import os
+import shutil
+import sys
+
+from spack import *
+
+
+def _install_shlib(name, src, dst):
+ """Install a shared library from directory src to directory dst"""
+ if sys.platform == "darwin":
+ shlib0 = name + ".0.dylib"
+ shlib = name + ".dylib"
+ shutil.copyfile(join_path(src, shlib0), join_path(dst, shlib0))
+ os.symlink(shlib0, join_path(dst, shlib))
+ else:
+ shlib000 = name + ".so.0.0.0"
+ shlib0 = name + ".so.0"
+ shlib = name + ".so"
+ shutil.copyfile(join_path(src, shlib000), join_path(dst, shlib000))
+ os.symlink(shlib000, join_path(dst, shlib0))
+ os.symlink(shlib0, join_path(dst, shlib))
+
+
+class Hdf5Blosc(Package):
+ """Blosc filter for HDF5"""
+ homepage = "https://github.com/Blosc/hdf5-blosc"
+ url = "https://github.com/Blosc/hdf5-blosc"
+
+ version('master', git='https://github.com/Blosc/hdf5-blosc',
+ branch='master')
+
+ depends_on("c-blosc")
+ depends_on("hdf5")
+ depends_on("libtool", type='build')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ # The included cmake recipe doesn't work for Darwin
+ # cmake(".", *std_cmake_args)
+ #
+ # make()
+ # make("install")
+ # if sys.platform == "darwin":
+ # fix_darwin_install_name(prefix.lib)
+
+ libtool = Executable(join_path(spec["libtool"].prefix.bin, "libtool"))
+
+ # TODO: these vars are not used.
+ # if "+mpi" in spec["hdf5"]:
+ # cc = "mpicc"
+ # else:
+ # cc = "cc"
+ # shlibext = "so" if sys.platform != "darwin" else "dylib"
+
+ mkdirp(prefix.include)
+ mkdirp(prefix.lib)
+
+ # Build and install filter
+ with working_dir("src"):
+ libtool("--mode=compile", "--tag=CC",
+ "cc", "-g", "-O",
+ "-c", "blosc_filter.c")
+ libtool("--mode=link", "--tag=CC",
+ "cc", "-g", "-O",
+ "-rpath", prefix.lib,
+ "-o", "libblosc_filter.la",
+ "blosc_filter.lo",
+ "-L%s" % spec["c-blosc"].prefix.lib, "-lblosc",
+ "-L%s" % spec["hdf5"].prefix.lib, "-lhdf5")
+ _install_shlib("libblosc_filter", ".libs", prefix.lib)
+
+ # Build and install plugin
+ # The plugin requires at least HDF5 1.8.11:
+ if spec["hdf5"].satisfies("@1.8.11:"):
+ libtool("--mode=compile", "--tag=CC",
+ "cc", "-g", "-O",
+ "-c", "blosc_plugin.c")
+ libtool("--mode=link", "--tag=CC",
+ "cc", "-g", "-O",
+ "-rpath", prefix.lib,
+ "-o", "libblosc_plugin.la",
+ "blosc_plugin.lo",
+ "-L%s" % prefix.lib, "-lblosc_filter",
+ "-L%s" % spec["c-blosc"].prefix.lib, "-lblosc",
+ "-L%s" % spec["hdf5"].prefix.lib, "-lhdf5")
+ _install_shlib("libblosc_plugin", ".libs", prefix.lib)
+
+ self.check_install(spec)
+
+ def check_install(self, spec):
+ "Build and run a small program to test the installed HDF5 Blosc plugin"
+ print("Checking HDF5-Blosc plugin...")
+ checkdir = "spack-check"
+ with working_dir(checkdir, create=True):
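+ # Compile and run a tiny HDF5 program that writes a chunked dataset
+ # with the Blosc filter to verify the plugin is found at runtime.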
+ source = r"""\
+#include <hdf5.h>
+#include <assert.h>
+#include <stdio.h>
+#include <stdlib.h>
+
+#define FILTER_BLOSC 32001 /* Blosc filter ID registered with the HDF group */
+
+int main(int argc, char **argv) {
+ herr_t herr;
+ hid_t file = H5Fcreate("file.h5", H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+ assert(file >= 0);
+ hsize_t dims[3] = {10, 10, 10};
+ hid_t space = H5Screate_simple(3, dims, NULL);
+ assert(space >= 0);
+ hid_t create_proplist = H5Pcreate(H5P_DATASET_CREATE);
+ assert(create_proplist >= 0);
+ herr = H5Pset_chunk(create_proplist, 3, dims);
+ assert(herr >= 0);
+ herr = H5Pset_filter(create_proplist, FILTER_BLOSC, H5Z_FLAG_OPTIONAL, 0,
+ NULL);
+ assert(herr >= 0);
+ htri_t all_filters_avail = H5Pall_filters_avail(create_proplist);
+ assert(all_filters_avail > 0);
+ hid_t dataset = H5Dcreate(file, "dataset", H5T_NATIVE_DOUBLE, space,
+ H5P_DEFAULT, create_proplist, H5P_DEFAULT);
+ assert(dataset >= 0);
+ double data[10][10][10];
+ for (int k=0; k<10; ++k) {
+ for (int j=0; j<10; ++j) {
+ for (int i=0; i<10; ++i) {
+ data[k][j][i] = 1.0 / (1.0 + i + j + k);
+ }
+ }
+ }
+ herr = H5Dwrite(dataset, H5T_NATIVE_DOUBLE, space, space, H5P_DEFAULT,
+ &data[0][0][0]);
+ assert(herr >= 0);
+ herr = H5Pclose(create_proplist);
+ assert(herr >= 0);
+ herr = H5Dclose(dataset);
+ assert(herr >= 0);
+ herr = H5Sclose(space);
+ assert(herr >= 0);
+ herr = H5Fclose(file);
+ assert(herr >= 0);
+ printf("Done.\n");
+ return 0;
+}
+"""
+ expected = """\
+Done.
+"""
+ with open("check.c", "w") as f:
+ f.write(source)
+ if "+mpi" in spec["hdf5"]:
+ cc = which("mpicc")
+ else:
+ cc = which("cc")
+ # TODO: Automate these path and library settings
+ cc("-c", "-I%s" % spec["hdf5"].prefix.include, "check.c")
+ cc("-o", "check", "check.o",
+ "-L%s" % spec["hdf5"].prefix.lib, "-lhdf5")
+ try:
+ check = Executable("./check")
+ output = check(return_output=True)
+ except:
+ output = ""
+ success = output == expected
+ if not success:
+ print("Produced output does not match expected output.")
+ print("Expected output:")
+ print("-" * 80)
+ print(expected)
+ print("-" * 80)
+ print("Produced output:")
+ print("-" * 80)
+ print(output)
+ print("-" * 80)
+ print("Environment:")
+ env = which("env")
+ env()
+ raise RuntimeError("HDF5 Blosc plugin check failed")
+ shutil.rmtree(checkdir)
+
+ def setup_environment(self, spack_env, run_env):
+ spack_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
+ run_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ spack_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
+ run_env.append_path("HDF5_PLUGIN_PATH", self.spec.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/hdf5/package.py b/var/spack/repos/builtin/packages/hdf5/package.py
index 21137ef356..222af53601 100644
--- a/var/spack/repos/builtin/packages/hdf5/package.py
+++ b/var/spack/repos/builtin/packages/hdf5/package.py
@@ -22,15 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
import shutil
-class Hdf5(Package):
+class Hdf5(AutotoolsPackage):
"""HDF5 is a data model, library, and file format for storing and managing
- data. It supports an unlimited variety of datatypes, and is designed for
- flexible and efficient I/O and for high volume and complex data.
+ data. It supports an unlimited variety of datatypes, and is designed for
+ flexible and efficient I/O and for high volume and complex data.
"""
homepage = "http://www.hdfgroup.org/HDF5/"
@@ -38,41 +37,58 @@ class Hdf5(Package):
list_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
list_depth = 3
+ version('1.10.0-patch1', '9180ff0ef8dc2ef3f61bd37a7404f295')
version('1.10.0', 'bdc935337ee8282579cd6bc4270ad199')
- version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618', preferred=True)
+ version('1.8.18', 'dd2148b740713ca0295442ec683d7b1c',
+ # The link for the latest version differs from the links for
+ # the previous releases. Do not forget to remove this once
+ # the version 1.8.18 is not the latest one for the 1.8.* branch.
+ url='http://hdfgroup.org/ftp/HDF5/current18/src/hdf5-1.8.18.tar.gz')
+ version('1.8.17', '7d572f8f3b798a628b8245af0391a0ca')
+ version('1.8.16', 'b8ed9a36ae142317f88b0c7ef4b9c618')
version('1.8.15', '03cccb5b33dbe975fdcd8ae9dc021f24')
+ version('1.8.14', 'a482686e733514a51cde12d6fe5c5d95')
version('1.8.13', 'c03426e9e77d7766944654280b467289')
+ version('1.8.12', 'd804802feb99b87fc668a90e6fa34411')
- variant('debug', default=False, description='Builds a debug version of the library')
- variant('shared', default=True, description='Builds a shared version of the library')
+ variant('debug', default=False,
+ description='Builds a debug version of the library')
+ variant('shared', default=True,
+ description='Builds a shared version of the library')
variant('cxx', default=True, description='Enable C++ support')
variant('fortran', default=True, description='Enable Fortran support')
- variant('mpi', default=False, description='Enable MPI support')
+ variant('mpi', default=True, description='Enable MPI support')
variant('szip', default=False, description='Enable szip support')
- variant('threadsafe', default=False, description='Enable thread-safe capabilities')
+ variant('threadsafe', default=False,
+ description='Enable thread-safe capabilities')
+ variant('pic', default=True,
+ description='Produce position-independent code (for shared libs)')
- depends_on("mpi", when='+mpi')
- depends_on("szip", when='+szip')
- depends_on("zlib")
+ depends_on('mpi', when='+mpi')
+ depends_on('szip', when='+szip')
+ depends_on('zlib@1.1.2:')
- def validate(self, spec):
+ @AutotoolsPackage.precondition('configure')
+ def validate(self):
"""
Checks if incompatible variants have been activated at the same time
:param spec: spec of the package
:raises RuntimeError: in case of inconsistencies
"""
+ spec = self.spec
if '+fortran' in spec and not self.compiler.fc:
msg = 'cannot build a fortran variant without a fortran compiler'
raise RuntimeError(msg)
if '+threadsafe' in spec and ('+cxx' in spec or '+fortran' in spec):
- raise RuntimeError("cannot use variant +threadsafe with either +cxx or +fortran")
+ msg = 'cannot use variant +threadsafe with either +cxx or +fortran'
+ raise RuntimeError(msg)
- def install(self, spec, prefix):
- self.validate(spec)
+ def configure_args(self):
+ spec = self.spec
# Handle compilation after spec validation
extra_args = []
@@ -107,6 +123,11 @@ class Hdf5(Package):
if spec.satisfies('@:1.8.16'):
extra_args.append('--enable-fortran2003')
+ if '+pic' in spec:
+ extra_args.append('CFLAGS={0}'.format(self.compiler.pic_flag))
+ extra_args.append('CXXFLAGS={0}'.format(self.compiler.pic_flag))
+ extra_args.append('FFLAGS={0}'.format(self.compiler.pic_flag))
+
if '+mpi' in spec:
# The HDF5 configure script warns if cxx and mpi are enabled
# together. There doesn't seem to be a real reason for this, except
@@ -115,16 +136,14 @@ class Hdf5(Package):
# this is not actually a problem.
extra_args.extend([
"--enable-parallel",
- "CC=%s" % join_path(spec['mpi'].prefix.bin, "mpicc"),
+ "CC=%s" % spec['mpi'].mpicc
])
if '+cxx' in spec:
- extra_args.append("CXX=%s" % join_path(spec['mpi'].prefix.bin,
- "mpic++"))
+ extra_args.append("CXX=%s" % spec['mpi'].mpicxx)
if '+fortran' in spec:
- extra_args.append("FC=%s" % join_path(spec['mpi'].prefix.bin,
- "mpifort"))
+ extra_args.append("FC=%s" % spec['mpi'].mpifc)
if '+szip' in spec:
extra_args.append("--with-szlib=%s" % spec['szip'].prefix)
@@ -135,17 +154,27 @@ class Hdf5(Package):
'--disable-hl',
])
- configure(
- "--prefix=%s" % prefix,
- "--with-zlib=%s" % spec['zlib'].prefix,
- *extra_args)
- make()
- make("install")
- self.check_install(spec)
-
- def check_install(self, spec):
- "Build and run a small program to test the installed HDF5 library"
- print "Checking HDF5 installation..."
+ return ["--with-zlib=%s" % spec['zlib'].prefix] + extra_args
+
+ def configure(self, spec, prefix):
+ # Run the default autotools package configure
+ super(Hdf5, self).configure(spec, prefix)
+
+ if '@:1.8.14' in spec:
+ # On Ubuntu14, HDF5 1.8.12 (and maybe other versions)
+ # mysteriously end up with "-l -l" in the postdeps in the
+ # libtool script. Patch this by removing the spurious -l's.
+ filter_file(
+ r'postdeps="([^"]*)"',
+ lambda m: 'postdeps="%s"' % ' '.join(
+ arg for arg in m.group(1).split(' ') if arg != '-l'),
+ 'libtool')
+
+ @AutotoolsPackage.sanity_check('install')
+ def check_install(self):
+ # Build and run a small program to test the installed HDF5 library
+ spec = self.spec
+ print("Checking HDF5 installation...")
checkdir = "spack-check"
with working_dir(checkdir, create=True):
source = r"""
@@ -163,17 +192,19 @@ int main(int argc, char **argv) {
"""
expected = """\
HDF5 version {version} {version}
-""".format(version=str(spec.version))
+""".format(version=str(spec.version.up_to(3)))
with open("check.c", 'w') as f:
f.write(source)
if '+mpi' in spec:
- cc = which(join_path(spec['mpi'].prefix.bin, "mpicc"))
+ cc = which('%s' % spec['mpi'].mpicc)
else:
cc = which('cc')
# TODO: Automate these path and library settings
cc('-c', "-I%s" % join_path(spec.prefix, "include"), "check.c")
cc('-o', "check", "check.o",
- "-L%s" % join_path(spec.prefix, "lib"), "-lhdf5",
+ "-L%s" % join_path(spec.prefix, "lib"),
+ "-L%s" % join_path(spec.prefix, "lib64"),
+ "-lhdf5",
"-lz")
try:
check = Executable('./check')
@@ -182,26 +213,37 @@ HDF5 version {version} {version}
output = ""
success = output == expected
if not success:
- print "Produced output does not match expected output."
- print "Expected output:"
- print '-'*80
- print expected
- print '-'*80
- print "Produced output:"
- print '-'*80
- print output
- print '-'*80
+ print("Produced output does not match expected output.")
+ print("Expected output:")
+ print('-' * 80)
+ print(expected)
+ print('-' * 80)
+ print("Produced output:")
+ print('-' * 80)
+ print(output)
+ print('-' * 80)
raise RuntimeError("HDF5 install check failed")
shutil.rmtree(checkdir)
def url_for_version(self, version):
- v = str(version)
+ # If we have a specific URL for this version, return it.
+ version_urls = self.version_urls()
+ if version in version_urls:
+ return version_urls[version]
+
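+ # Otherwise construct the URL from the release layout, which has
+ # changed several times across HDF5 versions.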
+ base_url = "http://www.hdfgroup.org/ftp/HDF5/releases"
if version == Version("1.2.2"):
- return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + ".tar.gz"
+ return "{0}/hdf5-{1}.tar.gz".format(base_url, version)
+ elif version < Version("1.6.6"):
+ return "{0}/hdf5-{1}/hdf5-{2}.tar.gz".format(
+ base_url, version.up_to(2), version)
elif version < Version("1.7"):
- return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + ".tar.gz"
+ return "{0}/hdf5-{1}/hdf5-{2}/src/hdf5-{2}.tar.gz".format(
+ base_url, version.up_to(2), version)
elif version < Version("1.10"):
- return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz"
+ return "{0}/hdf5-{1}/src/hdf5-{1}.tar.gz".format(
+ base_url, version)
else:
- return "http://www.hdfgroup.org/ftp/HDF5/releases/hdf5-" + version.up_to(2) + "/hdf5-" + v + "/src/hdf5-" + v + ".tar.gz"
+ return "{0}/hdf5-{1}/hdf5-{2}/src/hdf5-{2}.tar.gz".format(
+ base_url, version.up_to(2), version)
diff --git a/var/spack/repos/builtin/packages/help2man/package.py b/var/spack/repos/builtin/packages/help2man/package.py
new file mode 100644
index 0000000000..506b1c1465
--- /dev/null
+++ b/var/spack/repos/builtin/packages/help2man/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Help2man(AutotoolsPackage):
+ """help2man produces simple manual pages from the '--help' and '--version'
+ output of other commands."""
+
+ homepage = "https://www.gnu.org/software/help2man/"
+ url = "http://gnu.askapache.com/help2man/help2man-1.47.4.tar.xz"
+
+ version('1.47.4', '544aca496a7d89de3e5d99e56a2f03d3')
+
+ depends_on('gettext', type='build')
diff --git a/var/spack/repos/builtin/packages/hepmc/package.py b/var/spack/repos/builtin/packages/hepmc/package.py
new file mode 100644
index 0000000000..ab80dcf6ba
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hepmc/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Hepmc(Package):
+ """The HepMC package is an object oriented, C++ event record for
+ High Energy Physics Monte Carlo generators and simulation."""
+
+ homepage = "http://hepmc.web.cern.ch/hepmc/"
+ url = "http://hepmc.web.cern.ch/hepmc/releases/hepmc2.06.09.tgz"
+
+ version('2.06.09', 'c47627ced4255b40e731b8666848b087')
+ version('2.06.08', 'a2e889114cafc4f60742029d69abd907')
+ version('2.06.07', '11d7035dccb0650b331f51520c6172e7')
+ version('2.06.06', '102e5503537a3ecd6ea6f466aa5bc4ae')
+ version('2.06.05', '2a4a2a945adf26474b8bdccf4f881d9c')
+
+ depends_on("cmake", type='build')
+
+ def install(self, spec, prefix):
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
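+ # HepMC's CMake build expects the momentum and length units to be
+ # chosen at configure time; GeV and millimeters are used here.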
+ options = [source_directory]
+ options.append('-Dmomentum:STRING=GEV')
+ options.append('-Dlength:STRING=MM')
+ options.extend(std_cmake_args)
+
+ with working_dir(build_directory, create=True):
+ cmake(*options)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/heppdt/package.py b/var/spack/repos/builtin/packages/heppdt/package.py
new file mode 100644
index 0000000000..54c846ae33
--- /dev/null
+++ b/var/spack/repos/builtin/packages/heppdt/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Heppdt(Package):
+ """The HepPDT package provides particle data tables, together with the
+ HepPID library, which contains translation methods for particle IDs
+ to and from various Monte Carlo generators and the PDG standard
+ numbering scheme. We realize that the generators adhere closely
+ to the standard, but there are occasional differences."""
+ homepage = "http://lcgapp.cern.ch/project/simu/HepPDT/"
+ url = "http://lcgapp.cern.ch/project/simu/HepPDT/download/HepPDT-2.06.01.tar.gz"
+
+ version('3.04.01', 'a8e93c7603d844266b62d6f189f0ac7e')
+ version('3.04.00', '2d2cd7552d3e9539148febacc6287db2')
+ version('3.03.02', '0b85f1809bb8b0b28a46f23c718b2773')
+ version('3.03.01', 'd411f3bfdf9c4350d802241ba2629cc2')
+ version('3.03.00', 'cd84d0a0454be982dcd8c285e060a7b3')
+ version('2.06.01', '5688b4bdbd84b48ed5dd2545a3dc33c0')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/hmmer/package.py b/var/spack/repos/builtin/packages/hmmer/package.py
new file mode 100644
index 0000000000..6a236e9fc9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hmmer/package.py
@@ -0,0 +1,76 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Hmmer(Package):
+ """HMMER is used for searching sequence databases for sequence homologs,
+ and for making sequence alignments. It implements methods using
+ probabilistic models called profile hidden Markov models (profile HMMs).
+ """
+ homepage = 'http://www.hmmer.org'
+ url = 'http://eddylab.org/software/hmmer3/3.1b2/hmmer-3.1b2.tar.gz'
+
+ version('3.1b2', 'c8c141018bc0ccd7fc37b33f2b945d5f')
+ version('3.0', '4cf685f3bc524ba5b5cdaaa070a83588')
+ version('2.4i', 'dab234c87e026ac1de942450750acd20')
+ version('2.3.2', '5f073340c0cf761288f961a73821228a')
+ version('2.3.1', 'c724413e5761c630892506698a4716e2')
+
+ variant('mpi', default=True, description='Compile with MPI')
+ variant('gsl', default=False, description='Compile with GSL')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('gsl', when='+gsl')
+
+ def url_for_version(self, version):
+ base_url = 'http://eddylab.org/software'
+
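+ # HMMER 3.x tarballs live under hmmer3/, older releases under hmmer/.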
+ if version >= Version('3.0'):
+ return '{0}/hmmer3/{1}/hmmer-{1}.tar.gz'.format(base_url, version)
+ else:
+ return '{0}/hmmer/{1}/hmmer-{1}.tar.gz'.format(base_url, version)
+
+ def install(self, spec, prefix):
+ configure_args = [
+ '--prefix={0}'.format(prefix)
+ ]
+
+ if '+gsl' in self.spec:
+ configure_args.extend([
+ '--with-gsl',
+ 'LIBS=-lgsl -lgslcblas'
+ ])
+
+ if '+mpi' in self.spec:
+ configure_args.append('--enable-mpi')
+
+ configure(*configure_args)
+ make()
+
+ if self.run_tests:
+ make('check')
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/hoomd-blue/package.py b/var/spack/repos/builtin/packages/hoomd-blue/package.py
index 90d5107e3b..5e27f6aead 100644
--- a/var/spack/repos/builtin/packages/hoomd-blue/package.py
+++ b/var/spack/repos/builtin/packages/hoomd-blue/package.py
@@ -25,6 +25,7 @@
from spack import *
import os
+
class HoomdBlue(Package):
"""HOOMD-blue is a general-purpose particle simulation toolkit. It scales
from a single CPU core to thousands of GPUs.
@@ -45,18 +46,18 @@ class HoomdBlue(Package):
variant('doc', default=True, description='Generate documentation')
extends('python')
- depends_on('py-numpy')
+ depends_on('py-numpy', type=('build', 'run'))
depends_on('boost+python')
- depends_on('cmake')
+ depends_on('cmake', type='build')
depends_on('mpi', when='+mpi')
depends_on('cuda', when='+cuda')
- depends_on('doxygen', when='+doc')
+ depends_on('doxygen', when='+doc', type='build')
def install(self, spec, prefix):
cmake_args = [
'-DPYTHON_EXECUTABLE=%s/python' % spec['python'].prefix.bin,
- '-DBOOST_ROOT=%s' % spec['boost' ].prefix
+ '-DBOOST_ROOT=%s' % spec['boost'].prefix
]
# MPI support
@@ -73,9 +74,9 @@ class HoomdBlue(Package):
cmake_args.append('-DENABLE_CUDA=OFF')
# CUDA-aware MPI library support
- #if '+cuda' in spec and '+mpi' in spec:
+ # if '+cuda' in spec and '+mpi' in spec:
# cmake_args.append('-DENABLE_MPI_CUDA=ON')
- #else:
+ # else:
# cmake_args.append('-DENABLE_MPI_CUDA=OFF')
# There may be a bug in the MPI-CUDA code. See:
diff --git a/var/spack/repos/builtin/packages/hpctoolkit-externals/package.py b/var/spack/repos/builtin/packages/hpctoolkit-externals/package.py
new file mode 100644
index 0000000000..86d95a1e21
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hpctoolkit-externals/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class HpctoolkitExternals(Package):
+ """The HPCToolkit performance analysis tool has many prerequisites;
+ the HpctoolkitExternals package provides all of them."""
+
+ homepage = "http://hpctoolkit.org"
+
+ # Note: No precise release tags/branches provided
+ version('5.4',
+ git='https://github.com/HPCToolkit/hpctoolkit-externals.git',
+ commit='3d2953623357bb06e9a4b51eca90a4b039c2710e')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+
+ options = ['CC=%s' % self.compiler.cc,
+ 'CXX=%s' % self.compiler.cxx]
+
+ with working_dir('spack-build', create=True):
+ configure = Executable('../configure')
+ configure('--prefix=%s' % prefix, *options)
+ make('install')
diff --git a/var/spack/repos/builtin/packages/hpctoolkit/package.py b/var/spack/repos/builtin/packages/hpctoolkit/package.py
new file mode 100644
index 0000000000..b6e03627de
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hpctoolkit/package.py
@@ -0,0 +1,65 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Hpctoolkit(Package):
+ """HPCToolkit is an integrated suite of tools for measurement and analysis
+ of program performance on computers ranging from multicore desktop systems
+ to the nation's largest supercomputers. By using statistical sampling of
+ timers and hardware performance counters, HPCToolkit collects accurate
+ measurements of a program's work, resource consumption, and inefficiency
+ and attributes them to the full calling context in which they occur."""
+
+ homepage = "http://hpctoolkit.org"
+
+ # Note: No precise release tags/branches provided
+ version('5.4', git='https://github.com/HPCToolkit/hpctoolkit.git',
+ commit='d9ca2112762e5a06ea31b5295d793e4a83272d19')
+
+ variant('mpi', default=True, description='Enable MPI support')
+ variant('papi', default=True, description='Enable PAPI counter support')
+
+ depends_on('hpctoolkit-externals')
+ depends_on('papi', when='+papi')
+ depends_on('mpi', when='+mpi')
+
+ def install(self, spec, prefix):
+
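+ # Pass the compilers explicitly and point configure at the prebuilt
+ # prerequisites provided by hpctoolkit-externals.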
+ options = ['CC=%s' % self.compiler.cc,
+ 'CXX=%s' % self.compiler.cxx,
+ '--with-externals=%s' % spec['hpctoolkit-externals'].prefix]
+
+ if '+mpi' in spec:
+ options.extend(['MPICXX=%s' % spec['mpi'].mpicxx])
+
+ if '+papi' in spec:
+ options.extend(['--with-papi=%s' % spec['papi'].prefix])
+
+ # TODO: BG-Q configure option
+ with working_dir('spack-build', create=True):
+ configure = Executable('../configure')
+ configure('--prefix=%s' % prefix, *options)
+ make('install')
diff --git a/var/spack/repos/builtin/packages/hpl/package.py b/var/spack/repos/builtin/packages/hpl/package.py
new file mode 100644
index 0000000000..fa0013de17
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hpl/package.py
@@ -0,0 +1,119 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+import platform
+
+
+class Hpl(Package):
+ """HPL is a software package that solves a (random) dense linear system
+ in double precision (64 bits) arithmetic on distributed-memory computers.
+ It can thus be regarded as a portable as well as freely available
+ implementation of the High Performance Computing Linpack Benchmark."""
+
+ homepage = "http://www.netlib.org/benchmark/hpl/"
+ url = "http://www.netlib.org/benchmark/hpl/hpl-2.2.tar.gz"
+
+ version('2.2', '0eb19e787c3dc8f4058db22c9e0c5320')
+
+ variant('openmp', default=False, description='Enable OpenMP support')
+
+ depends_on('mpi@1.1:')
+ depends_on('blas')
+
+ parallel = False
+
+ def configure(self, spec, arch):
+ # List of configuration options
+ # Order is important
+ config = []
+
+ # OpenMP support
+ if '+openmp' in spec:
+ config.append(
+ 'OMP_DEFS = {0}'.format(self.compiler.openmp_flag)
+ )
+
+ config.extend([
+ # Shell
+ 'SHELL = /bin/sh',
+ 'CD = cd',
+ 'CP = cp',
+ 'LN_S = ln -fs',
+ 'MKDIR = mkdir -p',
+ 'RM = /bin/rm -f',
+ 'TOUCH = touch',
+ # Platform identifier
+ 'ARCH = {0}'.format(arch),
+ # HPL Directory Structure / HPL library
+ 'TOPdir = {0}'.format(os.getcwd()),
+ 'INCdir = $(TOPdir)/include',
+ 'BINdir = $(TOPdir)/bin/$(ARCH)',
+ 'LIBdir = $(TOPdir)/lib/$(ARCH)',
+ 'HPLlib = $(LIBdir)/libhpl.a',
+ # Message Passing library (MPI)
+ 'MPinc = -I{0}'.format(spec['mpi'].prefix.include),
+ 'MPlib = -L{0}'.format(spec['mpi'].prefix.lib),
+ # Linear Algebra library (BLAS or VSIPL)
+ 'LAinc = {0}'.format(spec['blas'].prefix.include),
+ 'LAlib = {0}'.format(spec['blas'].blas_libs.joined()),
+ # F77 / C interface
+ 'F2CDEFS = -DAdd_ -DF77_INTEGER=int -DStringSunStyle',
+ # HPL includes / libraries / specifics
+ 'HPL_INCLUDES = -I$(INCdir) -I$(INCdir)/$(ARCH) ' +
+ '-I$(LAinc) -I$(MPinc)',
+ 'HPL_LIBS = $(HPLlib) $(LAlib) $(MPlib)',
+ 'HPL_OPTS = -DHPL_DETAILED_TIMING -DHPL_PROGRESS_REPORT',
+ 'HPL_DEFS = $(F2CDEFS) $(HPL_OPTS) $(HPL_INCLUDES)',
+ # Compilers / linkers - Optimization flags
+ 'CC = {0}'.format(spec['mpi'].mpicc),
+ 'CCNOOPT = $(HPL_DEFS)',
+ 'CCFLAGS = $(HPL_DEFS) -O3',
+ 'LINKER = $(CC)',
+ 'LINKFLAGS = $(CCFLAGS) $(OMP_DEFS)',
+ 'ARCHIVER = ar',
+ 'ARFLAGS = r',
+ 'RANLIB = echo'
+ ])
+
+ # Write configuration options to include file
+ with open('Make.{0}'.format(arch), 'w') as makefile:
+ for var in config:
+ makefile.write('{0}\n'.format(var))
+
+ def install(self, spec, prefix):
+ # Arch used for file naming purposes only
+ arch = '{0}-{1}'.format(platform.system(), platform.processor())
+
+ # Generate Makefile include
+ self.configure(spec, arch)
+
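+ # Build against the generated Make.<arch>; HPL has no install target,
+ # so the build products are copied into the prefix manually below.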
+ make('arch={0}'.format(arch))
+
+ # Manual installation
+ install_tree(join_path('bin', arch), prefix.bin)
+ install_tree(join_path('lib', arch), prefix.lib)
+ install_tree(join_path('include', arch), prefix.include)
+ install_tree('man', prefix.man)
diff --git a/var/spack/repos/builtin/packages/hpx5/package.py b/var/spack/repos/builtin/packages/hpx5/package.py
index cd0c0b7a7b..fe75c256f8 100644
--- a/var/spack/repos/builtin/packages/hpx5/package.py
+++ b/var/spack/repos/builtin/packages/hpx5/package.py
@@ -25,6 +25,7 @@
from spack import *
import os
+
class Hpx5(Package):
"""The HPX-5 Runtime System. HPX-5 (High Performance ParalleX) is an
open source, portable, performance-oriented runtime developed at
@@ -36,15 +37,16 @@ class Hpx5(Package):
applications enabling scientists to write code that performs and
scales better than contemporary runtimes."""
homepage = "http://hpx.crest.iu.edu"
- url = "http://hpx.crest.iu.edu/release/hpx-2.0.0.tar.gz"
+ url = "http://hpx.crest.iu.edu/release/hpx-3.1.0.tar.gz"
+ version('3.1.0', '9e90b8ac46788c009079632828c77628')
version('2.0.0', '3d2ff3aab6c46481f9ec65c5b2bfe7a6')
version('1.3.0', '2260ecc7f850e71a4d365a43017d8cee')
version('1.2.0', '4972005f85566af4afe8b71afbf1480f')
version('1.1.0', '646afb460ecb7e0eea713a634933ce4f')
version('1.0.0', '8020822adf6090bd59ed7fe465f6c6cb')
- variant('debug', default=False, description='Build a debug version of HPX-5')
+ variant('debug', default=False, description='Build debug version of HPX-5')
variant('photon', default=False, description='Enable Photon support')
variant('mpi', default=False, description='Enable MPI support')
diff --git a/var/spack/repos/builtin/packages/hsakmt/package.py b/var/spack/repos/builtin/packages/hsakmt/package.py
new file mode 100644
index 0000000000..0daad4afed
--- /dev/null
+++ b/var/spack/repos/builtin/packages/hsakmt/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Hsakmt(Package):
+ """hsakmt is a thunk library that provides a userspace interface to amdkfd
+ (AMD's HSA Linux kernel driver). It is the HSA equivalent of libdrm."""
+
+ homepage = "https://cgit.freedesktop.org/amd/hsakmt/"
+ url = "https://www.x.org/archive/individual/lib/hsakmt-1.0.0.tar.gz"
+
+ version('1.0.0', '9beb20104e505300daf541266c4c3c3d')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/htop/package.py b/var/spack/repos/builtin/packages/htop/package.py
new file mode 100644
index 0000000000..3df0a35356
--- /dev/null
+++ b/var/spack/repos/builtin/packages/htop/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Htop(Package):
+ """htop is an interactive text-mode process viewer for Unix systems."""
+
+ homepage = "https://github.com/hishamhm/htop"
+ url = "https://hisham.hm/htop/releases/2.0.2/htop-2.0.2.tar.gz"
+
+ version('2.0.2', '7d354d904bad591a931ad57e99fea84a')
+
+ depends_on('ncurses')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/htslib/package.py b/var/spack/repos/builtin/packages/htslib/package.py
new file mode 100644
index 0000000000..1a8b8fd2f5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/htslib/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Htslib(Package):
+ """C library for high-throughput sequencing data formats."""
+
+ homepage = "https://github.com/samtools/htslib"
+ url = "https://github.com/samtools/htslib/releases/download/1.3.1/htslib-1.3.1.tar.bz2"
+
+ version('1.3.1', '16d78f90b72f29971b042e8da8be6843')
+
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/hub/package.py b/var/spack/repos/builtin/packages/hub/package.py
index ed8b742e42..5d7c082897 100644
--- a/var/spack/repos/builtin/packages/hub/package.py
+++ b/var/spack/repos/builtin/packages/hub/package.py
@@ -1,3 +1,27 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
from spack import *
import os
diff --git a/var/spack/repos/builtin/packages/hwloc/package.py b/var/spack/repos/builtin/packages/hwloc/package.py
index ee0168c38e..111c424b22 100644
--- a/var/spack/repos/builtin/packages/hwloc/package.py
+++ b/var/spack/repos/builtin/packages/hwloc/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Hwloc(Package):
"""The Portable Hardware Locality (hwloc) software package
provides a portable abstraction (across OS, versions,
@@ -41,6 +42,7 @@ class Hwloc(Package):
list_url = "http://www.open-mpi.org/software/hwloc/"
list_depth = 3
+ version('1.11.4', 'b6f23eb59074fd09fdd84905d50b103d')
version('1.11.3', 'c1d36a9de6028eac1d18ea4782ef958f')
version('1.11.2', 'e4ca55c2a5c5656da4a4e37c8fc51b23')
version('1.11.1', 'feb4e416a1b25963ed565d8b42252fdc')
diff --git a/var/spack/repos/builtin/packages/hydra/package.py b/var/spack/repos/builtin/packages/hydra/package.py
index 3d56056022..eee346ba49 100644
--- a/var/spack/repos/builtin/packages/hydra/package.py
+++ b/var/spack/repos/builtin/packages/hydra/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Hydra(Package):
"""Hydra is a process management system for starting parallel jobs.
Hydra is designed to natively work with existing launcher daemons
@@ -37,7 +38,6 @@ class Hydra(Package):
version('3.2', '4d670916695bf7e3a869cc336a881b39')
-
def install(self, spec, prefix):
configure('--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/hypre/package.py b/var/spack/repos/builtin/packages/hypre/package.py
index f87dae9f4e..ed0f665462 100644
--- a/var/spack/repos/builtin/packages/hypre/package.py
+++ b/var/spack/repos/builtin/packages/hypre/package.py
@@ -23,7 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os, sys
+import os
+import sys
+
class Hypre(Package):
"""Hypre is a library of high performance preconditioners that
@@ -33,34 +35,44 @@ class Hypre(Package):
homepage = "http://computation.llnl.gov/project/linear_solvers/software.php"
url = "http://computation.llnl.gov/project/linear_solvers/download/hypre-2.10.0b.tar.gz"
+ version('develop', git='https://github.com/LLNL/hypre', tag='master')
+ version('2.11.1', '3f02ef8fd679239a6723f60b7f796519')
version('2.10.1', 'dc048c4cabb3cd549af72591474ad674')
version('2.10.0b', '768be38793a35bb5d055905b271f5b8e')
# hypre does not know how to build shared libraries on Darwin
- variant('shared', default=sys.platform!='darwin', description="Build shared library version (disables static library)")
+ variant('shared', default=(sys.platform != 'darwin'),
+ description="Build shared library (disables static library)")
# SuperluDist have conflicting headers with those in Hypre
- variant('internal-superlu', default=True, description="Use internal Superlu routines")
+ variant('internal-superlu', default=True,
+ description="Use internal Superlu routines")
+ variant('int64', default=False,
+ description="Use 64bit integers")
depends_on("mpi")
depends_on("blas")
depends_on("lapack")
def install(self, spec, prefix):
- blas_dir = spec['blas'].prefix
- lapack_dir = spec['lapack'].prefix
- mpi_dir = spec['mpi'].prefix
-
- os.environ['CC'] = os.path.join(mpi_dir, 'bin', 'mpicc')
- os.environ['CXX'] = os.path.join(mpi_dir, 'bin', 'mpicxx')
- os.environ['F77'] = os.path.join(mpi_dir, 'bin', 'mpif77')
+ os.environ['CC'] = spec['mpi'].mpicc
+ os.environ['CXX'] = spec['mpi'].mpicxx
+ os.environ['F77'] = spec['mpi'].mpif77
+ # Note: --with-(lapack|blas)_libs= needs space separated list of names
+ lapack = spec['lapack'].lapack_libs
+ blas = spec['blas'].blas_libs
configure_args = [
- "--prefix=%s" % prefix,
- "--with-lapack-libs=lapack",
- "--with-lapack-lib-dirs=%s/lib" % lapack_dir,
- "--with-blas-libs=blas",
- "--with-blas-lib-dirs=%s/lib" % blas_dir]
+ '--prefix=%s' % prefix,
+ '--with-lapack-libs=%s' % ' '.join(lapack.names),
+ '--with-lapack-lib-dirs=%s' % ' '.join(lapack.directories),
+ '--with-blas-libs=%s' % ' '.join(blas.names),
+ '--with-blas-lib-dirs=%s' % ' '.join(blas.directories)
+ ]
+
+ if '+int64' in self.spec:
+ configure_args.append('--enable-bigint')
+
if '+shared' in self.spec:
configure_args.append("--enable-shared")
@@ -76,4 +88,12 @@ class Hypre(Package):
configure(*configure_args)
make()
+ if self.run_tests:
+ make("check")
+ make("test")
+ Executable(join_path('test', 'ij'))()
+ sstruct = Executable(join_path('test', 'sstruct'))
+ sstruct()
+ sstruct('-in', 'test/sstruct.in.default', '-solver', '40',
+ '-rhsone')
make("install")
diff --git a/var/spack/repos/builtin/packages/ibmisc/package.py b/var/spack/repos/builtin/packages/ibmisc/package.py
new file mode 100644
index 0000000000..f325205507
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ibmisc/package.py
@@ -0,0 +1,78 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ibmisc(CMakePackage):
+ """Misc. reusable utilities used by IceBin."""
+
+ homepage = "https://github.com/citibeth/ibmisc"
+ url = "https://github.com/citibeth/ibmisc/tarball/123"
+
+ version('0.1.0', '12f2a32432a11db48e00217df18e59fa')
+
+ variant('everytrace', default=False,
+ description='Report errors through Everytrace')
+ variant('proj', default=True,
+ description='Compile utilities for PROJ.4 library')
+ variant('blitz', default=True,
+ description='Compile utilities for Blitz library')
+ variant('netcdf', default=True,
+ description='Compile utilities for NetCDF library')
+ variant('boost', default=True,
+ description='Compile utilities for Boost library')
+ variant('udunits2', default=True,
+ description='Compile utilities for UDUNITS2 library')
+ variant('googletest', default=True,
+ description='Compile utilities for Google Test library')
+ variant('python', default=True,
+ description='Compile utilities for use with Python/Cython')
+
+ extends('python')
+
+ depends_on('eigen')
+ depends_on('everytrace', when='+everytrace')
+ depends_on('proj', when='+proj')
+ depends_on('blitz', when='+blitz')
+ depends_on('netcdf-cxx4', when='+netcdf')
+ depends_on('udunits2', when='+udunits2')
+ depends_on('googletest', when='+googletest', type='build')
+ depends_on('py-cython', when='+python', type=('build', 'run'))
+ depends_on('py-numpy', when='+python', type=('build', 'run'))
+ depends_on('boost', when='+boost')
+
+ # Build dependencies
+ depends_on('doxygen', type='build')
+
+ def cmake_args(self):
+ spec = self.spec
+ return [
+ '-DUSE_EVERYTRACE=%s' % ('YES' if '+everytrace' in spec else 'NO'),
+ '-DUSE_PROJ4=%s' % ('YES' if '+proj' in spec else 'NO'),
+ '-DUSE_BLITZ=%s' % ('YES' if '+blitz' in spec else 'NO'),
+ '-DUSE_NETCDF=%s' % ('YES' if '+netcdf' in spec else 'NO'),
+ '-DUSE_BOOST=%s' % ('YES' if '+boost' in spec else 'NO'),
+ '-DUSE_UDUNITS2=%s' % ('YES' if '+udunits2' in spec else 'NO'),
+ '-DUSE_GTEST=%s' % ('YES' if '+googletest' in spec else 'NO')]
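
cmake_args above spells out the same variant-to-flag translation once per variant. A standalone sketch of that mapping as a small helper; the helper name and the plain set standing in for Spack's spec are illustrative only:

def cmake_use_flags(flag_by_variant, enabled):
    # Turn each variant into -D<FLAG>=YES/NO, as Ibmisc.cmake_args does above.
    return ['-D{0}={1}'.format(flag, 'YES' if variant in enabled else 'NO')
            for variant, flag in sorted(flag_by_variant.items())]


if __name__ == '__main__':
    print(cmake_use_flags(
        {'everytrace': 'USE_EVERYTRACE', 'proj': 'USE_PROJ4',
         'netcdf': 'USE_NETCDF'},
        enabled={'proj', 'netcdf'}))
    # ['-DUSE_EVERYTRACE=NO', '-DUSE_NETCDF=YES', '-DUSE_PROJ4=YES']
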
diff --git a/var/spack/repos/builtin/packages/iceauth/package.py b/var/spack/repos/builtin/packages/iceauth/package.py
new file mode 100644
index 0000000000..59c6e0e7b7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/iceauth/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Iceauth(Package):
+ """The iceauth program is used to edit and display the authorization
+ information used in connecting with ICE. It operates very much
+ like the xauth program for X11 connection authentication records."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/iceauth"
+ url = "https://www.x.org/archive/individual/app/iceauth-1.0.7.tar.gz"
+
+ version('1.0.7', '183e834ec8bd096ac084ad4acbc29f51')
+
+ depends_on('libice')
+
+ depends_on('xproto@7.0.22:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/icet/package.py b/var/spack/repos/builtin/packages/icet/package.py
new file mode 100644
index 0000000000..f8260f1951
--- /dev/null
+++ b/var/spack/repos/builtin/packages/icet/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Icet(CMakePackage):
+ """The Image Composition Engine for Tiles (IceT) is a high-performance
+ sort-last parallel rendering library."""
+
+ homepage = "http://icet.sandia.gov"
+ url = "https://gitlab.kitware.com/icet/icet/repository/archive.tar.bz2?ref=IceT-2.1.1"
+
+ version('develop', branch='master',
+ git='https://gitlab.kitware.com/icet/icet.git')
+ version('2.1.1', '4f971c51105a64937460d482adca2a6c')
+
+ depends_on('mpi')
+
+ def url_for_version(self, version):
+ return ("https://gitlab.kitware.com/icet/icet/repository/"
+ "archive.tar.bz2?ref=IceT-{0}".format(version.dotted))
+
+ def cmake_args(self):
+ return ['-DICET_USE_OPENGL:BOOL=OFF']
diff --git a/var/spack/repos/builtin/packages/ico/package.py b/var/spack/repos/builtin/packages/ico/package.py
new file mode 100644
index 0000000000..5e523575ca
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ico/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ico(Package):
+ """ico is a simple animation program that may be used for testing various
+ X11 operations and extensions. It displays a wire-frame rotating
+ polyhedron, with hidden lines removed, or a solid-fill polyhedron with
+ hidden faces removed."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/ico"
+ url = "https://www.x.org/archive/individual/app/ico-1.0.4.tar.gz"
+
+ version('1.0.4', '8833b2da01a7f919b0db8e5a49184c0f')
+
+ depends_on('libx11@0.99.1:')
+
+ depends_on('xproto@7.0.22:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/icu4c/package.py b/var/spack/repos/builtin/packages/icu4c/package.py
index 6cfec99c3f..e2fcb7bd5f 100644
--- a/var/spack/repos/builtin/packages/icu4c/package.py
+++ b/var/spack/repos/builtin/packages/icu4c/package.py
@@ -24,18 +24,27 @@
##############################################################################
from spack import *
+
class Icu4c(Package):
- """ICU is a mature, widely used set of C/C++ and Java libraries
- providing Unicode and Globalization support for software applications."""
+ """ICU is a mature, widely used set of C/C++ and Java libraries providing
+ Unicode and Globalization support for software applications. ICU4C is the
+ C/C++ interface."""
homepage = "http://site.icu-project.org/"
- url = "http://downloads.sourceforge.net/project/icu/ICU4C/54.1/icu4c-54_1-src.tgz"
+ url = "http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz"
+
+ version('57.1', '976734806026a4ef8bdd17937c8898b9')
- version('54_1', 'e844caed8f2ca24c088505b0d6271bc0')
+ def url_for_version(self, version):
+ base_url = "http://download.icu-project.org/files/icu4c"
+ return "{0}/{1}/icu4c-{2}-src.tgz".format(
+ base_url, version, version.underscored)
def install(self, spec, prefix):
- cd("source")
- configure("--prefix=%s" % prefix)
+ with working_dir('source'):
+ configure('--prefix={0}'.format(prefix),
+ '--enable-rpath')
- make()
- make("install")
+ make()
+ make('check')
+ make('install')
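
The new url_for_version relies on the underscored property of Spack's version objects (57.1 becomes 57_1). A standalone sketch of the same construction, with a plain string standing in for the version type:

def icu4c_url(version):
    # Mirrors Icu4c.url_for_version above; '.underscored' there joins the
    # version components with underscores.
    base_url = "http://download.icu-project.org/files/icu4c"
    return "{0}/{1}/icu4c-{2}-src.tgz".format(
        base_url, version, version.replace('.', '_'))


if __name__ == '__main__':
    print(icu4c_url('57.1'))
    # http://download.icu-project.org/files/icu4c/57.1/icu4c-57_1-src.tgz
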
diff --git a/var/spack/repos/builtin/packages/ilmbase/package.py b/var/spack/repos/builtin/packages/ilmbase/package.py
new file mode 100644
index 0000000000..873c830623
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ilmbase/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ilmbase(Package):
+ """OpenEXR ILM Base libraries (high dynamic-range image file format)"""
+
+ homepage = "http://www.openexr.com/"
+ url = "http://download.savannah.nongnu.org/releases/openexr/ilmbase-2.2.0.tar.gz"
+
+ version('2.2.0', 'b540db502c5fa42078249f43d18a4652')
+ version('2.1.0', 'af1115f4d759c574ce84efcde9845d29')
+ version('2.0.1', '74c0d0d2873960bd0dc1993f8e03f0ae')
+ version('1.0.2', '26c133ee8ca48e1196fbfb3ffe292ab4')
+ version('0.9.0', '4df45f8116cb7a013b286caf6da30a2e')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+ make('install')
diff --git a/var/spack/repos/builtin/packages/image-magick/package.py b/var/spack/repos/builtin/packages/image-magick/package.py
new file mode 100644
index 0000000000..9efb0cd368
--- /dev/null
+++ b/var/spack/repos/builtin/packages/image-magick/package.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class ImageMagick(Package):
+ """ImageMagick is a software suite to create, edit, compose,
+ or convert bitmap images."""
+
+ homepage = "http://www.imagemagick.org"
+ url = "https://github.com/ImageMagick/ImageMagick/archive/7.0.2-7.tar.gz"
+
+ version('7.0.2-7', 'c59cdc8df50e481b2bd1afe09ac24c08')
+ version('7.0.2-6', 'aa5689129c39a5146a3212bf5f26d478')
+
+ depends_on('jpeg')
+ depends_on('pango')
+ depends_on('libtool', type='build')
+ depends_on('libpng')
+ depends_on('freetype')
+ depends_on('fontconfig')
+ depends_on('libtiff')
+ depends_on('ghostscript')
+ depends_on('ghostscript-fonts')
+
+ def url_for_version(self, version):
+ return "https://github.com/ImageMagick/ImageMagick/archive/{0}.tar.gz".format(version)
+
+ def install(self, spec, prefix):
+ gs_font_dir = join_path(spec['ghostscript-fonts'].prefix.share, "font")
+ configure('--prefix={0}'.format(prefix),
+ '--with-gs-font-dir={0}'.format(gs_font_dir))
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/imake/package.py b/var/spack/repos/builtin/packages/imake/package.py
new file mode 100644
index 0000000000..32542b0391
--- /dev/null
+++ b/var/spack/repos/builtin/packages/imake/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Imake(Package):
+ """The imake build system."""
+
+ homepage = "http://www.snake.net/software/imake-stuff/"
+ url = "https://www.x.org/archive/individual/util/imake-1.0.7.tar.gz"
+
+ version('1.0.7', '186ca7b8ff0de8752f2a2d0426542363')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/inputproto/package.py b/var/spack/repos/builtin/packages/inputproto/package.py
new file mode 100644
index 0000000000..915986ef68
--- /dev/null
+++ b/var/spack/repos/builtin/packages/inputproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Inputproto(Package):
+ """X Input Extension.
+
+ This extension defines a protocol to provide additional input device
+ management, such as graphic tablets.
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/inputproto"
+ url = "https://www.x.org/archive/individual/proto/inputproto-2.3.2.tar.gz"
+
+ version('2.3.2', '6450bad6f8d5ebe354b01b734d1fd7ca')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/intel-gpu-tools/package.py b/var/spack/repos/builtin/packages/intel-gpu-tools/package.py
new file mode 100644
index 0000000000..132e29839b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/intel-gpu-tools/package.py
@@ -0,0 +1,67 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class IntelGpuTools(Package):
+ """Intel GPU Tools is a collection of tools for development and testing of
+ the Intel DRM driver. There are many macro-level test suites that get used
+ against the driver, including xtest, rendercheck, piglit, and oglconform,
+ but failures from those can be difficult to track down to kernel changes,
+ and many require complicated build procedures or specific testing
+ environments to get useful results. Therefore, Intel GPU Tools includes
+ low-level tools and tests specifically for development and testing of the
+ Intel DRM Driver."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/app/intel-gpu-tools/"
+ url = "https://www.x.org/archive/individual/app/intel-gpu-tools-1.16.tar.gz"
+
+ version('1.16', '3996f10fc86a28ec59e1cf7b227dad78')
+
+ depends_on('libdrm@2.4.64:')
+ depends_on('libpciaccess@0.10:')
+ depends_on('cairo@1.12.0:')
+ depends_on('glib')
+
+ depends_on('flex', type='build')
+ depends_on('bison', type='build')
+ depends_on('python@3:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ # xrandr ?
+
+ # gtk-doc-tools
+ # libunwind-dev
+ # python-docutils
+ # x11proto-dri2-dev
+ # xutils-dev
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/intel-mkl/package.py b/var/spack/repos/builtin/packages/intel-mkl/package.py
new file mode 100644
index 0000000000..f369e10d38
--- /dev/null
+++ b/var/spack/repos/builtin/packages/intel-mkl/package.py
@@ -0,0 +1,99 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+from spack.pkg.builtin.intel import IntelInstaller
+
+
+class IntelMkl(IntelInstaller):
+ """Intel Math Kernel Library.
+
+ Note: You will have to add the download file to a
+ mirror so that Spack can find it. For instructions on how to set up a
+ mirror, see http://spack.readthedocs.io/en/latest/mirrors.html.
+
+ To set the threading layer at run time, set the MKL_THREADING_LAYER
+ variable to one of the following values: INTEL (default), SEQUENTIAL, PGI.
+ To set the interface layer at run time, set the MKL_INTERFACE_LAYER
+ variable to LP64 (default) or ILP64.
+ """
+
+ homepage = "https://software.intel.com/en-us/intel-mkl"
+
+ version('11.3.2.181', '536dbd82896d6facc16de8f961d17d65',
+ url="file://%s/l_mkl_11.3.2.181.tgz" % os.getcwd())
+ version('11.3.3.210', 'f72546df27f5ebb0941b5d21fd804e34',
+ url="file://%s/l_mkl_11.3.3.210.tgz" % os.getcwd())
+
+ variant('shared', default=True, description='Builds shared library')
+ variant('ilp64', default=False, description='64 bit integers')
+ variant('openmp', default=False, description='OpenMP multithreading layer')
+
+ # virtual dependency
+ provides('blas')
+ provides('lapack')
+ provides('mkl')
+ # TODO: MKL also provides implementation of Scalapack.
+
+ @property
+ def blas_libs(self):
+ shared = True if '+shared' in self.spec else False
+ suffix = dso_suffix if '+shared' in self.spec else 'a'
+ mkl_integer = ['libmkl_intel_ilp64'] if '+ilp64' in self.spec else ['libmkl_intel_lp64'] # NOQA: ignore=E501
+ mkl_threading = ['libmkl_sequential']
+ if '+openmp' in self.spec:
+ mkl_threading = ['libmkl_intel_thread', 'libiomp5'] if '%intel' in self.spec else ['libmkl_gnu_thread'] # NOQA: ignore=E501
+ # TODO: TBB threading: ['libmkl_tbb_thread', 'libtbb', 'libstdc++']
+ mkl_libs = find_libraries(
+ mkl_integer + ['libmkl_core'] + mkl_threading,
+ root=join_path(self.prefix.lib, 'intel64'),
+ shared=shared
+ )
+ system_libs = [
+ 'libpthread.{0}'.format(suffix),
+ 'libm.{0}'.format(suffix),
+ 'libdl.{0}'.format(suffix)
+ ]
+ return mkl_libs + system_libs
+
+ @property
+ def lapack_libs(self):
+ return self.blas_libs
+
+ def install(self, spec, prefix):
+ self.intel_prefix = os.path.join(prefix, "pkg")
+ IntelInstaller.install(self, spec, prefix)
+
+ mkl_dir = os.path.join(self.intel_prefix, "mkl")
+ for f in os.listdir(mkl_dir):
+ os.symlink(os.path.join(mkl_dir, f), os.path.join(self.prefix, f))
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ # set up MKLROOT for everyone using MKL package
+ spack_env.set('MKLROOT', self.prefix)
+
+ def setup_environment(self, spack_env, run_env):
+ run_env.set('MKLROOT', self.prefix)
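
The blas_libs property above selects MKL interface and threading libraries from the spec before calling find_libraries. A standalone sketch of just that selection step, with plain booleans standing in for the '+ilp64', '+openmp' and '%intel' queries:

def mkl_library_names(ilp64, openmp, intel_compiler):
    # Same choices as IntelMkl.blas_libs above, minus the find_libraries call.
    integer = ['libmkl_intel_ilp64'] if ilp64 else ['libmkl_intel_lp64']
    threading = ['libmkl_sequential']
    if openmp:
        threading = (['libmkl_intel_thread', 'libiomp5']
                     if intel_compiler else ['libmkl_gnu_thread'])
    return integer + ['libmkl_core'] + threading


if __name__ == '__main__':
    print(mkl_library_names(ilp64=False, openmp=True, intel_compiler=False))
    # ['libmkl_intel_lp64', 'libmkl_core', 'libmkl_gnu_thread']
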
diff --git a/var/spack/repos/builtin/packages/intel-parallel-studio/package.py b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py
new file mode 100644
index 0000000000..8564eeb941
--- /dev/null
+++ b/var/spack/repos/builtin/packages/intel-parallel-studio/package.py
@@ -0,0 +1,362 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+import re
+
+from spack.pkg.builtin.intel import IntelInstaller, filter_pick, \
+ get_all_components
+
+
+class IntelParallelStudio(IntelInstaller):
+ """Intel Parallel Studio.
+
+ Note: You will have to add the download file to a
+ mirror so that Spack can find it. For instructions on how to set up a
+ mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
+
+ homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"
+
+ version('professional.2017.1', '7f75a4a7e2c563be778c377f9d35a542')
+ version('cluster.2017.1', '7f75a4a7e2c563be778c377f9d35a542')
+ version('composer.2017.1', '1f31976931ed8ec424ac7c3ef56f5e85')
+ version('professional.2017.0', '34c98e3329d6ac57408b738ae1daaa01')
+ version('cluster.2017.0', '34c98e3329d6ac57408b738ae1daaa01')
+ version('composer.2016.3', '3208eeabee951fc27579177b593cefe9')
+ version('professional.2016.3', 'eda19bb0d0d19709197ede58f13443f3')
+ version('cluster.2016.3', 'eda19bb0d0d19709197ede58f13443f3')
+ version('composer.2016.2', '1133fb831312eb519f7da897fec223fa')
+ version('professional.2016.2', '70be832f2d34c9bf596a5e99d5f2d832')
+ version('cluster.2016.2', '70be832f2d34c9bf596a5e99d5f2d832')
+
+ variant('rpath', default=True, description="Add rpath to .cfg files")
+ variant('newdtags', default=False,
+ description="Allow use of --enable-new-dtags in MPI wrappers")
+ variant('all', default=False,
+ description="Install all files with the requested edition")
+ variant('mpi', default=True,
+ description="Install the Intel MPI library and ITAC tool")
+ variant('mkl', default=True, description="Install the Intel MKL library")
+ variant('daal',
+ default=True, description="Install the Intel DAAL libraries")
+ variant('ipp', default=True, description="Install the Intel IPP libraries")
+ variant('tools', default=True, description="Install the Intel Advisor, "
+ "VTune Amplifier, and Inspector tools")
+
+ variant('shared', default=True, description='Builds shared library')
+ variant('ilp64', default=False, description='64 bit integers')
+ variant('openmp', default=False, description='OpenMP multithreading layer')
+
+ provides('mpi', when='@cluster:+mpi')
+ provides('mkl', when='+mkl')
+ provides('daal', when='+daal')
+ provides('ipp', when='+ipp')
+
+ # virtual dependency
+ provides('blas', when='+mkl')
+ provides('lapack', when='+mkl')
+ # TODO: MKL also provides implementation of Scalapack.
+
+ @property
+ def blas_libs(self):
+ shared = True if '+shared' in self.spec else False
+ suffix = dso_suffix if '+shared' in self.spec else 'a'
+ mkl_integer = ['libmkl_intel_ilp64'] if '+ilp64' in self.spec else ['libmkl_intel_lp64'] # NOQA: ignore=E501
+ mkl_threading = ['libmkl_sequential']
+ if '+openmp' in self.spec:
+ mkl_threading = ['libmkl_intel_thread', 'libiomp5'] if '%intel' in self.spec else ['libmkl_gnu_thread'] # NOQA: ignore=E501
+ # TODO: TBB threading: ['libmkl_tbb_thread', 'libtbb', 'libstdc++']
+ mkl_libs = find_libraries(
+ mkl_integer + ['libmkl_core'] + mkl_threading,
+ root=join_path(self.prefix, 'mkl', 'lib', 'intel64'),
+ shared=shared
+ )
+ system_libs = [
+ 'libpthread.{0}'.format(suffix),
+ 'libm.{0}'.format(suffix),
+ 'libdl.{0}'.format(suffix)
+ ]
+ return mkl_libs + system_libs
+
+ @property
+ def lapack_libs(self):
+ return self.blas_libs
+
+ def url_for_version(self, version):
+ """Assume the tarball is in the current directory."""
+
+ version_tuple = str(version).split('.')
+ year = int(version_tuple[1])
+
+ url = "file://{0}/parallel_studio_xe_{1}".format(
+ os.getcwd(), version_tuple[1])
+
+ update_string = ""
+ if version_tuple[2] != '0':
+ update_string = "_update{0}".format(version_tuple[2])
+
+ if version_tuple[0] == 'composer':
+ if year == 2016:
+ url += "_composer_edition{0}".format(update_string)
+ else:
+ url += "{0}_composer_edition".format(update_string)
+ else:
+ url += update_string
+
+ return url + ".tgz"
+
+ def check_variants(self, spec):
+ error_message = '\t{variant} cannot be turned off if "+all" is set'
+
+ if self.spec.satisfies('+all'):
+ errors = [error_message.format(variant=x)
+ for x in ('mpi', 'mkl', 'daal', 'ipp', 'tools')
+ if ('~' + x) in self.spec]
+ if errors:
+ errors = ['incompatible variants given'] + errors
+ raise InstallError('\n'.join(errors))
+
+ def install(self, spec, prefix):
+ self.check_variants(spec)
+
+ base_components = "ALL" # when in doubt, install everything
+ mpi_components = ""
+ mkl_components = ""
+ daal_components = ""
+ ipp_components = ""
+
+ if not spec.satisfies('+all'):
+ all_components = get_all_components()
+ regex = '(comp|openmp|intel-tbb|icc|ifort|psxe)'
+ base_components = \
+ filter_pick(all_components, re.compile(regex).search)
+ regex = '(icsxe|imb|mpi|itac|intel-ta|intel-tc|clck)'
+ mpi_components = \
+ filter_pick(all_components, re.compile(regex).search)
+ mkl_components = \
+ filter_pick(all_components, re.compile('(mkl)').search)
+ daal_components = \
+ filter_pick(all_components, re.compile('(daal)').search)
+ ipp_components = \
+ filter_pick(all_components, re.compile('(ipp)').search)
+ regex = '(gdb|vtune|inspector|advisor)'
+ tool_components = \
+ filter_pick(all_components, re.compile(regex).search)
+ components = base_components
+
+ if not spec.satisfies('+all'):
+ if spec.satisfies('+mpi'):
+ components += mpi_components
+ if spec.satisfies('+mkl'):
+ components += mkl_components
+ if spec.satisfies('+daal'):
+ components += daal_components
+ if spec.satisfies('+ipp'):
+ components += ipp_components
+ if spec.satisfies('+tools') and (spec.satisfies('@cluster') or
+ spec.satisfies('@professional')):
+ components += tool_components
+
+ if spec.satisfies('+all'):
+ self.intel_components = 'ALL'
+ else:
+ self.intel_components = ';'.join(components)
+ IntelInstaller.install(self, spec, prefix)
+
+ absbindir = os.path.dirname(
+ os.path.realpath(os.path.join(self.prefix.bin, "icc")))
+ abslibdir = os.path.dirname(
+ os.path.realpath(os.path.join(
+ self.prefix.lib, "intel64", "libimf.a")))
+
+ os.symlink(self.global_license_file, os.path.join(absbindir,
+ "license.lic"))
+ if spec.satisfies('+tools') and (spec.satisfies('@cluster') or
+ spec.satisfies('@professional')):
+ inspector_dir = "inspector_xe/licenses"
+ advisor_dir = "advisor_xe/licenses"
+ vtune_amplifier_dir = "vtune_amplifier_xe/licenses"
+
+ year = int(str(self.version).split('.')[1])
+ if year >= 2017:
+ inspector_dir = "inspector/licenses"
+ advisor_dir = "advisor/licenses"
+
+ os.mkdir(os.path.join(self.prefix, inspector_dir))
+ os.symlink(self.global_license_file, os.path.join(
+ self.prefix, inspector_dir, "license.lic"))
+ os.mkdir(os.path.join(self.prefix, advisor_dir))
+ os.symlink(self.global_license_file, os.path.join(
+ self.prefix, advisor_dir, "license.lic"))
+ os.mkdir(os.path.join(self.prefix, vtune_amplifier_dir))
+ os.symlink(self.global_license_file, os.path.join(
+ self.prefix, vtune_amplifier_dir, "license.lic"))
+
+ if (spec.satisfies('+all') or spec.satisfies('+mpi')) and \
+ spec.satisfies('@cluster'):
+ for ifile in os.listdir(os.path.join(self.prefix, "itac")):
+ if os.path.isdir(os.path.join(self.prefix, "itac", ifile)):
+ os.symlink(self.global_license_file,
+ os.path.join(self.prefix, "itac", ifile,
+ "license.lic"))
+ if os.path.isdir(os.path.join(self.prefix, "itac",
+ ifile, "intel64")):
+ os.symlink(self.global_license_file,
+ os.path.join(self.prefix, "itac",
+ ifile, "intel64",
+ "license.lic"))
+ if spec.satisfies('~newdtags'):
+ wrappers = ["mpif77", "mpif90", "mpigcc", "mpigxx",
+ "mpiicc", "mpiicpc", "mpiifort"]
+ wrapper_paths = []
+ for root, dirs, files in os.walk(spec.prefix):
+ for name in files:
+ if name in wrappers:
+ wrapper_paths.append(os.path.join(spec.prefix,
+ root, name))
+ for wrapper in wrapper_paths:
+ filter_file(r'-Xlinker --enable-new-dtags', r' ',
+ wrapper)
+
+ if spec.satisfies('+rpath'):
+ for compiler_command in ["icc", "icpc", "ifort"]:
+ cfgfilename = os.path.join(absbindir, "%s.cfg" %
+ compiler_command)
+ with open(cfgfilename, "w") as f:
+ f.write('-Xlinker -rpath -Xlinker %s\n' % abslibdir)
+
+ os.symlink(os.path.join(self.prefix.man, "common", "man1"),
+ os.path.join(self.prefix.man, "man1"))
+
+ def setup_environment(self, spack_env, run_env):
+ # TODO: Determine variables needed for the professional edition.
+
+ major_ver = self.version[1]
+
+ # Remove paths that were guessed but are incorrect for this package.
+ run_env.remove_path('LIBRARY_PATH',
+ join_path(self.prefix, 'lib'))
+ run_env.remove_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'lib'))
+ run_env.remove_path('CPATH',
+ join_path(self.prefix, 'include'))
+
+ # Add the default set of variables
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'intel64'))
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'intel64'))
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib',
+ 'intel64', 'gcc4.4'))
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib',
+ 'intel64', 'gcc4.4'))
+ run_env.prepend_path('CPATH',
+ join_path(self.prefix, 'tbb', 'include'))
+ run_env.prepend_path('MIC_LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'mic'))
+ run_env.prepend_path('MIC_LD_LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'mic'))
+ run_env.prepend_path('MIC_LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib', 'mic'))
+ run_env.prepend_path('MIC_LD_LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib', 'mic'))
+
+ if self.spec.satisfies('+all'):
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix,
+ 'debugger_{0}'.format(major_ver),
+ 'libipt', 'intel64', 'lib'))
+ run_env.set('GDBSERVER_MIC',
+ join_path(self.prefix,
+ 'debugger_{0}'.format(major_ver), 'gdb',
+ 'targets', 'mic', 'bin', 'gdbserver'))
+ run_env.set('GDB_CROSS',
+ join_path(self.prefix,
+ 'debugger_{0}'.format(major_ver),
+ 'gdb', 'intel64_mic', 'bin', 'gdb-mic'))
+ run_env.set('MPM_LAUNCHER',
+ join_path(self.prefix,
+ 'debugger_{0}'.format(major_ver), 'mpm',
+ 'mic',
+ 'bin', 'start_mpm.sh'))
+ run_env.set('INTEL_PYTHONHOME',
+ join_path(self.prefix,
+ 'debugger_{0}'.format(major_ver), 'python',
+ 'intel64'))
+
+ if (self.spec.satisfies('+all') or self.spec.satisfies('+mpi')):
+ # Only I_MPI_ROOT is set here because setting the various PATH
+ # variables will potentially be in conflict with other MPI
+ # environment modules. The I_MPI_ROOT environment variable can be
+ # used as a base to set necessary PATH variables for using Intel
+ # MPI. It is also possible to set the variables in the modules.yaml
+ # file if Intel MPI is the dominant, or only, MPI on a system.
+ run_env.set('I_MPI_ROOT', join_path(self.prefix, 'impi'))
+
+ if self.spec.satisfies('+all') or self.spec.satisfies('+mkl'):
+ spack_env.set('MKLROOT', join_path(self.prefix, 'mkl'))
+
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'mkl', 'lib',
+ 'intel64'))
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'mkl', 'lib',
+ 'intel64'))
+ run_env.prepend_path('CPATH',
+ join_path(self.prefix, 'mkl', 'include'))
+ run_env.prepend_path('MIC_LD_LIBRARY_PATH',
+ join_path(self.prefix, 'mkl', 'lib', 'mic'))
+ run_env.set('MKLROOT', join_path(self.prefix, 'mkl'))
+
+ if self.spec.satisfies('+all') or self.spec.satisfies('+daal'):
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'daal', 'lib',
+ 'intel64_lin'))
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'daal', 'lib',
+ 'intel64_lin'))
+ run_env.prepend_path('CPATH',
+ join_path(self.prefix, 'daal', 'include'))
+ run_env.prepend_path('CLASSPATH',
+ join_path(self.prefix, 'daal', 'lib',
+ 'daal.jar'))
+ run_env.set('DAALROOT', join_path(self.prefix, 'daal'))
+
+ if self.spec.satisfies('+all') or self.spec.satisfies('+ipp'):
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'ipp', 'lib',
+ 'intel64'))
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'ipp', 'lib',
+ 'intel64'))
+ run_env.prepend_path('CPATH',
+ join_path(self.prefix, 'ipp', 'include'))
+ run_env.prepend_path('MIC_LD_LIBRARY_PATH',
+ join_path(self.prefix, 'ipp', 'lib', 'mic'))
+ run_env.set('IPPROOT', join_path(self.prefix, 'ipp'))
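
url_for_version in this package encodes Intel's tarball naming scheme. A standalone sketch of the same logic, with a plain 'edition.year.update' string standing in for the version object and the file://<cwd>/ prefix omitted:

def studio_tarball_name(version):
    # Mirrors IntelParallelStudio.url_for_version above.
    edition, year, update = version.split('.')
    name = "parallel_studio_xe_{0}".format(year)
    update_string = "" if update == '0' else "_update{0}".format(update)
    if edition == 'composer':
        if int(year) == 2016:
            name += "_composer_edition{0}".format(update_string)
        else:
            name += "{0}_composer_edition".format(update_string)
    else:
        name += update_string
    return name + ".tgz"


if __name__ == '__main__':
    print(studio_tarball_name('composer.2016.3'))
    # parallel_studio_xe_2016_composer_edition_update3.tgz
    print(studio_tarball_name('cluster.2017.1'))
    # parallel_studio_xe_2017_update1.tgz
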
diff --git a/var/spack/repos/builtin/packages/intel/package.py b/var/spack/repos/builtin/packages/intel/package.py
new file mode 100644
index 0000000000..4164383f5b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/intel/package.py
@@ -0,0 +1,176 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+import re
+
+
+def filter_pick(input_list, regex_filter):
+ """Returns the items in input_list that are found in the regex_filter"""
+ return [l for l in input_list for m in (regex_filter(l),) if m]
+
+
+def unfilter_pick(input_list, regex_filter):
+ """Returns the items in input_list that are not found in the
+ regex_filter"""
+ return [l for l in input_list for m in (regex_filter(l),) if not m]
+
+
+def get_all_components():
+ """Returns a list of all the components associated with the downloaded
+ Intel package"""
+ all_components = []
+ with open("pset/mediaconfig.xml", "r") as f:
+ lines = f.readlines()
+ for line in lines:
+ if line.find('<Abbr>') != -1:
+ component = line[line.find('<Abbr>') + 6:line.find('</Abbr>')]
+ all_components.append(component)
+ return all_components
+
+
+class IntelInstaller(Package):
+ """Base package containing common methods for installing Intel software"""
+
+ homepage = "https://software.intel.com/en-us"
+ intel_components = "ALL"
+ license_required = True
+ license_comment = '#'
+ license_files = ['Licenses/license.lic']
+ license_vars = ['INTEL_LICENSE_FILE']
+ license_url = \
+ 'https://software.intel.com/en-us/articles/intel-license-manager-faq'
+
+ @property
+ def global_license_file(self):
+ """Returns the path where a global license file should be stored."""
+ if not self.license_files:
+ return
+ return join_path(self.global_license_dir, "intel",
+ os.path.basename(self.license_files[0]))
+
+ def install(self, spec, prefix):
+
+ if not hasattr(self, "intel_prefix"):
+ self.intel_prefix = self.prefix
+
+ silent_config_filename = 'silent.cfg'
+ with open(silent_config_filename, 'w') as f:
+ f.write("""
+ACCEPT_EULA=accept
+PSET_MODE=install
+CONTINUE_WITH_INSTALLDIR_OVERWRITE=yes
+PSET_INSTALL_DIR=%s
+NONRPM_DB_DIR=%s
+ACTIVATION_LICENSE_FILE=%s
+ACTIVATION_TYPE=license_file
+PHONEHOME_SEND_USAGE_DATA=no
+CONTINUE_WITH_OPTIONAL_ERROR=yes
+COMPONENTS=%s
+""" % (self.intel_prefix, self.intel_prefix, self.global_license_file,
+ self.intel_components))
+
+ install_script = Executable("./install.sh")
+ install_script('--silent', silent_config_filename)
+
+
+class Intel(IntelInstaller):
+ """Intel Compilers.
+
+ Note: You will have to add the download file to a
+ mirror so that Spack can find it. For instructions on how to set up a
+ mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
+
+ homepage = "https://software.intel.com/en-us/intel-parallel-studio-xe"
+
+ # TODO: can also try the online installer (will download files on demand)
+ version('16.0.2', '1133fb831312eb519f7da897fec223fa',
+ url="file://%s/parallel_studio_xe_2016_composer_edition_update2.tgz"
+ % os.getcwd())
+ version('16.0.3', '3208eeabee951fc27579177b593cefe9',
+ url="file://%s/parallel_studio_xe_2016_composer_edition_update3.tgz"
+ % os.getcwd())
+
+ variant('rpath', default=True, description="Add rpath to .cfg files")
+
+ def install(self, spec, prefix):
+ components = []
+ all_components = get_all_components()
+ regex = '(comp|openmp|intel-tbb|icc|ifort|psxe|icsxe-pset)'
+ components = filter_pick(all_components, re.compile(regex).search)
+
+ self.intel_components = ';'.join(components)
+ IntelInstaller.install(self, spec, prefix)
+
+ absbindir = os.path.split(os.path.realpath(os.path.join(
+ self.prefix.bin, "icc")))[0]
+ abslibdir = os.path.split(os.path.realpath(os.path.join(
+ self.prefix.lib, "intel64", "libimf.a")))[0]
+
+ # symlink or copy?
+ os.symlink(self.global_license_file,
+ os.path.join(absbindir, "license.lic"))
+
+ if spec.satisfies('+rpath'):
+ for compiler_command in ["icc", "icpc", "ifort"]:
+ cfgfilename = os.path.join(absbindir, "%s.cfg" %
+ compiler_command)
+ with open(cfgfilename, "w") as f:
+ f.write('-Xlinker -rpath -Xlinker %s\n' % abslibdir)
+
+ os.symlink(os.path.join(self.prefix.man, "common", "man1"),
+ os.path.join(self.prefix.man, "man1"))
+
+ def setup_environment(self, spack_env, run_env):
+
+ # Remove paths that were guessed but are incorrect for this package.
+ run_env.remove_path('LIBRARY_PATH',
+ join_path(self.prefix, 'lib'))
+ run_env.remove_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'lib'))
+ run_env.remove_path('CPATH',
+ join_path(self.prefix, 'include'))
+
+ # Add the default set of variables
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'intel64'))
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'intel64'))
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib',
+ 'intel64', 'gcc4.4'))
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib',
+ 'intel64', 'gcc4.4'))
+ run_env.prepend_path('CPATH',
+ join_path(self.prefix, 'tbb', 'include'))
+ run_env.prepend_path('MIC_LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'mic'))
+ run_env.prepend_path('MIC_LD_LIBRARY_PATH',
+ join_path(self.prefix, 'lib', 'mic'))
+ run_env.prepend_path('MIC_LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib', 'mic'))
+ run_env.prepend_path('MIC_LD_LIBRARY_PATH',
+ join_path(self.prefix, 'tbb', 'lib', 'mic'))
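
get_all_components near the top of this file scrapes component abbreviations out of pset/mediaconfig.xml. A standalone sketch of that extraction on a small XML snippet; the component names below are made up for illustration:

SAMPLE = """<Product>
  <Abbr>intel-icc__x86_64</Abbr>
  <Abbr>intel-mkl__x86_64</Abbr>
</Product>"""


def abbreviations(text):
    # Same slicing as get_all_components above, applied to a string
    # instead of the mediaconfig.xml file.
    components = []
    for line in text.splitlines():
        if line.find('<Abbr>') != -1:
            components.append(
                line[line.find('<Abbr>') + 6:line.find('</Abbr>')])
    return components


if __name__ == '__main__':
    print(abbreviations(SAMPLE))
    # ['intel-icc__x86_64', 'intel-mkl__x86_64']
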
diff --git a/var/spack/repos/builtin/packages/intltool/package.py b/var/spack/repos/builtin/packages/intltool/package.py
index 48830c474a..e934734691 100644
--- a/var/spack/repos/builtin/packages/intltool/package.py
+++ b/var/spack/repos/builtin/packages/intltool/package.py
@@ -24,8 +24,12 @@
##############################################################################
from spack import *
+
class Intltool(Package):
- """intltool is a set of tools to centralize translation of many different file formats using GNU gettext-compatible PO files."""
+ """intltool is a set of tools to centralize translation of many different
+ file formats using GNU gettext-compatible PO files.
+
+ """
homepage = 'https://freedesktop.org/wiki/Software/intltool/'
version('0.51.0', '12e517cac2b57a0121cda351570f1e63')
@@ -37,7 +41,7 @@ class Intltool(Package):
def install(self, spec, prefix):
# configure, build, install:
- options = ['--prefix=%s' % prefix ]
+ options = ['--prefix=%s' % prefix]
configure(*options)
make()
make('install')
diff --git a/var/spack/repos/builtin/packages/ior/package.py b/var/spack/repos/builtin/packages/ior/package.py
index 6aa72e5cc9..b8825c0fe4 100644
--- a/var/spack/repos/builtin/packages/ior/package.py
+++ b/var/spack/repos/builtin/packages/ior/package.py
@@ -25,6 +25,7 @@
from spack import *
import os
+
class Ior(Package):
"""The IOR software is used for benchmarking parallel file systems
using POSIX, MPI-IO, or HDF5 interfaces."""
@@ -41,7 +42,6 @@ class Ior(Package):
depends_on('hdf5+mpi', when='+hdf5')
depends_on('netcdf+mpi', when='+ncmpi')
-
def install(self, spec, prefix):
os.system('./bootstrap')
diff --git a/var/spack/repos/builtin/packages/ipopt/package.py b/var/spack/repos/builtin/packages/ipopt/package.py
index 47c5ca0c09..bd1e5f36ef 100644
--- a/var/spack/repos/builtin/packages/ipopt/package.py
+++ b/var/spack/repos/builtin/packages/ipopt/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Ipopt(Package):
"""Ipopt (Interior Point OPTimizer, pronounced eye-pea-Opt) is a
software package for large-scale nonlinear optimization."""
@@ -38,9 +39,9 @@ class Ipopt(Package):
depends_on("blas")
depends_on("lapack")
- depends_on("pkg-config")
- depends_on("mumps+double~mpi")
-
+ depends_on("pkg-config", type='build')
+ depends_on("mumps+double~mpi")
+
def install(self, spec, prefix):
# Dependency directories
blas_dir = spec['blas'].prefix
@@ -52,10 +53,9 @@ class Ipopt(Package):
mumps_flags = "-ldmumps -lmumps_common -lpord -lmpiseq"
mumps_libcmd = "-L%s " % mumps_dir.lib + mumps_flags
- # By convention, spack links blas & lapack libs to libblas & liblapack
- blas_lib = "-L%s" % blas_dir.lib + " -lblas"
- lapack_lib = "-L%s" % lapack_dir.lib + " -llapack"
-
+ blas_lib = spec['blas'].blas_libs.ld_flags
+ lapack_lib = spec['lapack'].lapack_libs.ld_flags
+
configure_args = [
"--prefix=%s" % prefix,
"--with-mumps-incdir=%s" % mumps_dir.include,
@@ -65,8 +65,8 @@ class Ipopt(Package):
"--with-blas-lib=%s" % blas_lib,
"--with-lapack-incdir=%s" % lapack_dir.include,
"--with-lapack-lib=%s" % lapack_lib
- ]
-
+ ]
+
configure(*configure_args)
# IPOPT does not build correctly in parallel on OS X
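
The --with-blas-lib/--with-lapack-lib values now come from the ld_flags attribute of the blas_libs/lapack_libs objects. A standalone sketch of how such a string is built from full library paths; the plain list and the example path stand in for Spack's LibraryList:

import os


def ld_flags(lib_paths):
    # '-L<dir>' search flags followed by '-l<name>' link flags.
    dirs = sorted(set(os.path.dirname(p) for p in lib_paths))
    names = [os.path.basename(p).split('.')[0][len('lib'):]
             for p in lib_paths]
    return ' '.join(['-L' + d for d in dirs] + ['-l' + n for n in names])


if __name__ == '__main__':
    print(ld_flags(['/opt/openblas/lib/libopenblas.so']))
    # -L/opt/openblas/lib -lopenblas
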
diff --git a/var/spack/repos/builtin/packages/ipp/package.py b/var/spack/repos/builtin/packages/ipp/package.py
new file mode 100644
index 0000000000..c48ace79c1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ipp/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+from spack.pkg.builtin.intel import IntelInstaller
+
+
+class Ipp(IntelInstaller):
+ """Intel Integrated Performance Primitives.
+
+ Note: You will have to add the download file to a
+ mirror so that Spack can find it. For instructions on how to set up a
+ mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
+
+ homepage = "https://software.intel.com/en-us/intel-ipp"
+
+ version('9.0.3.210', '0e1520dd3de7f811a6ef6ebc7aa429a3',
+ url="file://%s/l_ipp_9.0.3.210.tgz" % os.getcwd())
+
+ def install(self, spec, prefix):
+
+ self.intel_prefix = os.path.join(prefix, "pkg")
+ IntelInstaller.install(self, spec, prefix)
+
+ ipp_dir = os.path.join(self.intel_prefix, "ipp")
+ for f in os.listdir(ipp_dir):
+ os.symlink(os.path.join(ipp_dir, f), os.path.join(self.prefix, f))
diff --git a/var/spack/repos/builtin/packages/isl/package.py b/var/spack/repos/builtin/packages/isl/package.py
index f456f62225..f6188e6b3e 100644
--- a/var/spack/repos/builtin/packages/isl/package.py
+++ b/var/spack/repos/builtin/packages/isl/package.py
@@ -24,12 +24,14 @@
##############################################################################
from spack import *
+
class Isl(Package):
"""isl is a thread-safe C library for manipulating sets and
relations of integer points bounded by affine constraints."""
homepage = "http://isl.gforge.inria.fr"
url = "http://isl.gforge.inria.fr/isl-0.14.tar.bz2"
+ version('0.18', '11436d6b205e516635b666090b94ab32')
version('0.14', 'acd347243fca5609e3df37dba47fd0bb')
depends_on("gmp")
diff --git a/var/spack/repos/builtin/packages/itstool/package.py b/var/spack/repos/builtin/packages/itstool/package.py
new file mode 100644
index 0000000000..b8b2b4459f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/itstool/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Itstool(Package):
+ """ITS Tool allows you to translate your XML documents with PO files, using
+ rules from the W3C Internationalization Tag Set (ITS) to determine what
+ to translate and how to separate it into PO file messages."""
+
+ homepage = "http://itstool.org/"
+ url = "http://files.itstool.org/itstool/itstool-2.0.2.tar.bz2"
+
+ version('2.0.2', 'd472d877a7bc49899a73d442085b2f93')
+ version('2.0.1', '40935cfb08228488bd45575e5f001a34')
+ version('2.0.0', 'd8c702c3e8961db83d04182c2aa4730b')
+ version('1.2.0', 'c0925f6869e33af8e7fe56848c129152')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/jansson/package.py b/var/spack/repos/builtin/packages/jansson/package.py
new file mode 100644
index 0000000000..e6100607aa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/jansson/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Jansson(CMakePackage):
+ """Jansson is a C library for encoding, decoding and manipulating JSON
+ data."""
+
+ homepage = "http://www.digip.org/jansson/"
+ url = "https://github.com/akheron/jansson/archive/v2.9.tar.gz"
+
+ version('2.9', 'd2db25c437b359fc5a065ed938962237')
diff --git a/var/spack/repos/builtin/packages/jasper/fix_alpha_channel_assert_fail.patch b/var/spack/repos/builtin/packages/jasper/fix_alpha_channel_assert_fail.patch
new file mode 100644
index 0000000000..cbf79ff971
--- /dev/null
+++ b/var/spack/repos/builtin/packages/jasper/fix_alpha_channel_assert_fail.patch
@@ -0,0 +1,25 @@
+diff --git a/src/libjasper/jpc/jpc_dec.c b/src/libjasper/jpc/jpc_dec.c
+index fa72a0e..1f4845f 100644
+--- a/src/libjasper/jpc/jpc_dec.c
++++ b/src/libjasper/jpc/jpc_dec.c
+@@ -1069,12 +1069,18 @@ static int jpc_dec_tiledecode(jpc_dec_t *dec, jpc_dec_tile_t *tile)
+ /* Apply an inverse intercomponent transform if necessary. */
+ switch (tile->cp->mctid) {
+ case JPC_MCT_RCT:
+- assert(dec->numcomps == 3);
++ if (dec->numcomps != 3 && dec->numcomps != 4) {
++ jas_eprintf("bad number of components (%d)\n", dec->numcomps);
++ return -1;
++ }
+ jpc_irct(tile->tcomps[0].data, tile->tcomps[1].data,
+ tile->tcomps[2].data);
+ break;
+ case JPC_MCT_ICT:
+- assert(dec->numcomps == 3);
++ if (dec->numcomps != 3 && dec->numcomps != 4) {
++ jas_eprintf("bad number of components (%d)\n", dec->numcomps);
++ return -1;
++ }
+ jpc_iict(tile->tcomps[0].data, tile->tcomps[1].data,
+ tile->tcomps[2].data);
+ break;
diff --git a/var/spack/repos/builtin/packages/jasper/package.py b/var/spack/repos/builtin/packages/jasper/package.py
new file mode 100644
index 0000000000..f450c7d155
--- /dev/null
+++ b/var/spack/repos/builtin/packages/jasper/package.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Jasper(Package):
+ """Library for manipulating JPEG-2000 images"""
+
+ homepage = "https://www.ece.uvic.ca/~frodo/jasper/"
+ url = "https://www.ece.uvic.ca/~frodo/jasper/software/jasper-1.900.1.zip"
+
+ version('1.900.1', 'a342b2b4495b3e1394e161eb5d85d754')
+
+ variant('shared', default=True,
+ description='Builds shared versions of the libraries')
+ variant('debug', default=False,
+ description='Builds debug versions of the libraries')
+
+ depends_on('libjpeg-turbo')
+
+ # Fixes a bug (still in upstream as of v.1.900.1) where an assertion fails
+ # when certain JPEG-2000 files with an alpha channel are processed
+ # see: https://bugs.debian.org/cgi-bin/bugreport.cgi?bug=469786
+ patch('fix_alpha_channel_assert_fail.patch')
+
+ def install(self, spec, prefix):
+ configure_options = [
+ '--prefix={0}'.format(prefix),
+ '--mandir={0}'.format(spec.prefix.man),
+ ]
+
+ if '+shared' in spec:
+ configure_options.append('--enable-shared')
+
+ if '+debug' not in spec:
+ configure_options.append('--disable-debug')
+
+ configure(*configure_options)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/jdk/package.py b/var/spack/repos/builtin/packages/jdk/package.py
index 2ec86f05e3..518a469435 100644
--- a/var/spack/repos/builtin/packages/jdk/package.py
+++ b/var/spack/repos/builtin/packages/jdk/package.py
@@ -22,49 +22,41 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-#------------------------------------------------------------------------------
+#
# Author: Justin Too <too1@llnl.gov>
-#------------------------------------------------------------------------------
-import distutils
-from distutils import dir_util
-from subprocess import call
-
-import spack
+#
+import distutils.dir_util
from spack import *
-import llnl.util.tty as tty
+
class Jdk(Package):
"""The Java Development Kit (JDK) released by Oracle Corporation
in the form of a binary product aimed at Java developers."""
homepage = "http://www.oracle.com/technetwork/java/javase/downloads/index.html"
- version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf',
- url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz")
-
# Oracle requires that you accept their License Agreement in order
# to access the Java packages in download.oracle.com. In order to
# automate this process, we need to utilize these additional curl
# commandline options.
#
# See http://stackoverflow.com/questions/10268583/how-to-automate-download-and-installation-of-java-jdk-on-linux
- curl_options=[
- '-j', # junk cookies
- '-H', # specify required License Agreement cookie
+ curl_options = [
+ '-j', # junk cookies
+ '-H', # specify required License Agreement cookie
'Cookie: oraclelicense=accept-securebackup-cookie']
- def do_fetch(self, mirror_only=False):
- # Add our custom curl commandline options
- tty.msg(
- "[Jdk] Adding required commandline options to curl " +
- "before performing fetch: %s" %
- (self.curl_options))
-
- for option in self.curl_options:
- spack.curl.add_default_arg(option)
-
- # Now perform the actual fetch
- super(Jdk, self).do_fetch(mirror_only)
-
+ version('8u66-linux-x64', '88f31f3d642c3287134297b8c10e61bf',
+ url="http://download.oracle.com/otn-pub/java/jdk/8u66-b17/jdk-8u66-linux-x64.tar.gz",
+ curl_options=curl_options)
+ version('8u92-linux-x64', '65a1cc17ea362453a6e0eb4f13be76e4',
+ url="http://download.oracle.com/otn-pub/java/jdk/8u92-b14/jdk-8u92-linux-x64.tar.gz",
+ curl_options=curl_options)
def install(self, spec, prefix):
distutils.dir_util.copy_tree(".", prefix)
+
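+ # Point JAVA_HOME at this JDK, both in the run environment and for
+ # packages that depend on it at build time.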
+ def setup_environment(self, spack_env, run_env):
+ run_env.set('JAVA_HOME', self.spec.prefix)
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ spack_env.set('JAVA_HOME', self.spec.prefix)
diff --git a/var/spack/repos/builtin/packages/jemalloc/package.py b/var/spack/repos/builtin/packages/jemalloc/package.py
index a67754a513..839251f47b 100644
--- a/var/spack/repos/builtin/packages/jemalloc/package.py
+++ b/var/spack/repos/builtin/packages/jemalloc/package.py
@@ -24,11 +24,16 @@
##############################################################################
from spack import *
+
class Jemalloc(Package):
- """jemalloc is a general purpose malloc(3) implementation that emphasizes fragmentation avoidance and scalable concurrency support."""
+ """jemalloc is a general purpose malloc(3) implementation that emphasizes
+ fragmentation avoidance and scalable concurrency support."""
homepage = "http://www.canonware.com/jemalloc/"
url = "https://github.com/jemalloc/jemalloc/releases/download/4.0.4/jemalloc-4.0.4.tar.bz2"
+ version('4.3.1', 'f204c0ea1aef92fbb339dc640de338a6')
+ version('4.2.1', '094b0a7b8c77c464d0dc8f0643fd3901')
+ version('4.2.0', 'e6b5d5a1ea93a04207528d274efdd144')
version('4.1.0', 'c4e53c947905a533d5899e5cc3da1f94')
version('4.0.4', '687c5cc53b9a7ab711ccd680351ff988')
@@ -36,7 +41,7 @@ class Jemalloc(Package):
variant('prof', default=False, description='Enable heap profiling')
def install(self, spec, prefix):
- configure_args = ['--prefix=%s' % prefix,]
+ configure_args = ['--prefix=%s' % prefix, ]
if '+stats' in spec:
configure_args.append('--enable-stats')
diff --git a/var/spack/repos/builtin/packages/jpeg/package.py b/var/spack/repos/builtin/packages/jpeg/package.py
index e4a9d8535b..594240d950 100644
--- a/var/spack/repos/builtin/packages/jpeg/package.py
+++ b/var/spack/repos/builtin/packages/jpeg/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Jpeg(Package):
"""libjpeg is a widely used free library with functions for handling the
JPEG image data format. It implements a JPEG codec (encoding and decoding)
diff --git a/var/spack/repos/builtin/packages/icu/package.py b/var/spack/repos/builtin/packages/jsoncpp/package.py
index c7cabb5d95..5169b338ee 100644
--- a/var/spack/repos/builtin/packages/icu/package.py
+++ b/var/spack/repos/builtin/packages/jsoncpp/package.py
@@ -24,26 +24,26 @@
##############################################################################
from spack import *
-class Icu(Package):
- """The International Components for Unicode (ICU) package is a
- mature, widely used set of C/C++ libraries providing Unicode and
- Globalization support for software applications. ICU is widely
- portable and gives applications the same results on all
- platforms."""
- # FIXME: add a proper url for your package's homepage here.
- homepage = "http://www.example.com"
- url = "http://download.icu-project.org/files/icu4c/54.1/icu4c-54_1-src.tgz"
- version('54.1', 'e844caed8f2ca24c088505b0d6271bc0')
+class Jsoncpp(Package):
+ """JsonCpp is a C++ library that allows manipulating JSON values,
+ including serialization and deserialization to and from strings.
+ It can also preserve existing comments in serialization/deserialization
+ steps, making it a convenient format to store user input files."""
+ homepage = "https://github.com/open-source-parsers/jsoncpp"
+ url = "https://github.com/open-source-parsers/jsoncpp/archive/1.7.3.tar.gz"
- def url_for_version(self, version):
- return "http://download.icu-project.org/files/icu4c/%s/icu4c-%s-src.tgz" % (
- version, str(version).replace('.', '_'))
+ version('1.7.3', 'aff6bfb5b81d9a28785429faa45839c5')
+ depends_on('cmake', type='build')
+ # depends_on('python', type='test')
def install(self, spec, prefix):
- with working_dir("source"):
- configure("--prefix=%s" % prefix)
+ with working_dir('spack-build', create=True):
+ cmake('..', '-DBUILD_SHARED_LIBS=ON', *std_cmake_args)
+
make()
- make("install")
+ if self.run_tests:
+ make('test') # Python needed to run tests
+ make('install')
diff --git a/var/spack/repos/builtin/packages/judy/package.py b/var/spack/repos/builtin/packages/judy/package.py
index 8d47767ef0..8b8b261e53 100644
--- a/var/spack/repos/builtin/packages/judy/package.py
+++ b/var/spack/repos/builtin/packages/judy/package.py
@@ -24,8 +24,9 @@
##############################################################################
from spack import *
+
class Judy(Package):
- """A general-purpose dynamic array, associative array and hash-trie - Judy"""
+ """Judy: General-purpose dynamic array, associative array and hash-trie."""
homepage = "http://judy.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/judy/judy/Judy-1.0.5/Judy-1.0.5.tar.gz"
diff --git a/var/spack/repos/builtin/packages/julia/package.py b/var/spack/repos/builtin/packages/julia/package.py
index 69b52130e9..89db570b81 100644
--- a/var/spack/repos/builtin/packages/julia/package.py
+++ b/var/spack/repos/builtin/packages/julia/package.py
@@ -22,8 +22,11 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+
from spack import *
import os
+import sys
+
class Julia(Package):
"""The Julia Language: A fresh approach to technical computing"""
@@ -32,28 +35,46 @@ class Julia(Package):
version('master',
git='https://github.com/JuliaLang/julia.git', branch='master')
+ version('release-0.5',
+ git='https://github.com/JuliaLang/julia.git', branch='release-0.5')
+ version('0.5.0', 'b61385671ba74767ab452363c43131fb', preferred=True)
+ version('release-0.4',
+ git='https://github.com/JuliaLang/julia.git', branch='release-0.4')
+ version('0.4.7', '75a7a7dd882b7840829d8f165e9b9078')
+ version('0.4.6', 'd88db18c579049c23ab8ef427ccedf5d')
version('0.4.5', '69141ff5aa6cee7c0ec8c85a34aa49a6')
version('0.4.3', '8a4a59fd335b05090dd1ebefbbe5aaac')
- patch('gc.patch')
+ # TODO: Split these out into jl-hdf5, jl-mpi packages etc.
+ variant("cxx", default=False, description="Prepare for Julia Cxx package")
+ variant("hdf5", default=False, description="Install Julia HDF5 package")
+ variant("mpi", default=True, description="Install Julia MPI package")
+ variant("plot", default=False,
+ description="Install Julia plotting packages")
+ variant("python", default=False,
+ description="Install Julia Python package")
+
+ patch('gc.patch', when='@0.4:0.4.5')
patch('openblas.patch', when='@0.4:0.4.5')
+ variant('binutils', default=sys.platform != 'darwin',
+ description="Build via binutils")
+
# Build-time dependencies:
# depends_on("awk")
- # depends_on("m4")
+ depends_on("m4", type="build")
# depends_on("pkg-config")
# Combined build-time and run-time dependencies:
- depends_on("binutils")
+ # (Yes, these are run-time dependencies used by Julia's package manager.)
+ depends_on("binutils", when='+binutils')
depends_on("cmake @2.8:")
- depends_on("git")
+ depends_on("curl")
+ depends_on("git", when='@:0.4')
+ depends_on("git", when='@release-0.4')
depends_on("openssl")
depends_on("python @2.7:2.999")
- # I think that Julia requires the dependencies above, but it
- # builds fine (on my system) without these. We should enable them
- # as necessary.
-
# Run-time dependencies:
# depends_on("arpack")
# depends_on("fftw +float")
@@ -88,30 +109,128 @@ class Julia(Package):
# USE_SYSTEM_LIBGIT2=0
# Run-time dependencies for Julia packages:
- depends_on("hdf5")
- depends_on("mpi")
+ depends_on("hdf5", when="+hdf5", type="run")
+ depends_on("mpi", when="+mpi", type="run")
+ depends_on("py-matplotlib", when="+plot", type="run")
def install(self, spec, prefix):
- if '@master' in spec:
- # Julia needs to know the offset from a specific commit
- git = which('git')
- git('fetch', '--unshallow')
-
+ # Julia needs git tags
+ if os.path.isfile(".git/shallow"):
+ git = which("git")
+ git("fetch", "--unshallow")
# Explicitly setting CC, CXX, or FC breaks building libuv, one
# of Julia's dependencies. This might be a Darwin-specific
# problem. Given how Spack sets up compilers, Julia should
# still use Spack's compilers, even if we don't specify them
# explicitly.
- options = [#"CC=cc",
- #"CXX=c++",
- #"FC=fc",
- #"USE_SYSTEM_ARPACK=1",
- #"USE_SYSTEM_FFTW=1",
- #"USE_SYSTEM_GMP=1",
- #"USE_SYSTEM_MPFR=1",
- #TODO "USE_SYSTEM_PCRE=1",
- "prefix=%s" % prefix]
+ options = [
+ # "CC=cc",
+ # "CXX=c++",
+ # "FC=fc",
+ # "USE_SYSTEM_ARPACK=1",
+ "override USE_SYSTEM_CURL=1",
+ # "USE_SYSTEM_FFTW=1",
+ # "USE_SYSTEM_GMP=1",
+ # "USE_SYSTEM_MPFR=1",
+ # "USE_SYSTEM_PCRE=1",
+ "prefix=%s" % prefix]
+ if "+cxx" in spec:
+ if "@master" not in spec:
+ raise InstallError(
+ "Variant +cxx requires the @master version of Julia")
+ options += [
+ "BUILD_LLVM_CLANG=1",
+ "LLVM_ASSERTIONS=1",
+ "USE_LLVM_SHLIB=1"]
with open('Make.user', 'w') as f:
f.write('\n'.join(options) + '\n')
make()
make("install")
+
+ # Julia's package manager needs a certificate
+ curl = which("curl")
+ cacert_file = join_path(prefix, "etc", "curl", "cacert.pem")
+ curl("--create-dirs",
+ "--output", cacert_file,
+ "https://curl.haxx.se/ca/cacert.pem")
+
+ # Put Julia's compiler cache into a private directory
+ cachedir = join_path(prefix, "var", "julia", "cache")
+ mkdirp(cachedir)
+
+ # Store Julia packages in a private directory
+ pkgdir = join_path(prefix, "var", "julia", "pkg")
+ mkdirp(pkgdir)
+
+ # Configure Julia
+ with open(join_path(prefix, "etc", "julia", "juliarc.jl"),
+ "a") as juliarc:
+ if "@master" in spec or "@release-0.5" in spec or "@0.5:" in spec:
+ # This is required for versions @0.5:
+ juliarc.write(
+ '# Point package manager to working certificates\n')
+ juliarc.write('LibGit2.set_ssl_cert_locations("%s")\n' %
+ cacert_file)
+ juliarc.write('\n')
+ juliarc.write('# Put compiler cache into a private directory\n')
+ juliarc.write('empty!(Base.LOAD_CACHE_PATH)\n')
+ juliarc.write('unshift!(Base.LOAD_CACHE_PATH, "%s")\n' % cachedir)
+ juliarc.write('\n')
+ juliarc.write('# Put Julia packages into a private directory\n')
+ juliarc.write('ENV["JULIA_PKGDIR"] = "%s"\n' % pkgdir)
+ juliarc.write('\n')
+
+ # Install some commonly used packages
+ julia = Executable(join_path(prefix.bin, "julia"))
+ julia("-e", 'Pkg.init(); Pkg.update()')
+
+ # Install HDF5
+ if "+hdf5" in spec:
+ with open(join_path(prefix, "etc", "julia", "juliarc.jl"),
+ "a") as juliarc:
+ juliarc.write('# HDF5\n')
+ juliarc.write('push!(Libdl.DL_LOAD_PATH, "%s")\n' %
+ spec["hdf5"].prefix.lib)
+ juliarc.write('\n')
+ julia("-e", 'Pkg.add("HDF5"); using HDF5')
+ julia("-e", 'Pkg.add("JLD"); using JLD')
+
+ # Install MPI
+ if "+mpi" in spec:
+ with open(join_path(prefix, "etc", "julia", "juliarc.jl"),
+ "a") as juliarc:
+ juliarc.write('# MPI\n')
+ juliarc.write('ENV["JULIA_MPI_C_COMPILER"] = "%s"\n' %
+ join_path(spec["mpi"].prefix.bin, "mpicc"))
+ juliarc.write('ENV["JULIA_MPI_Fortran_COMPILER"] = "%s"\n' %
+ join_path(spec["mpi"].prefix.bin, "mpifort"))
+ juliarc.write('\n')
+ julia("-e", 'Pkg.add("MPI"); using MPI')
+
+ # Install Python
+ if "+python" in spec or "+plot" in spec:
+ with open(join_path(prefix, "etc", "julia", "juliarc.jl"),
+ "a") as juliarc:
+ juliarc.write('# Python\n')
+ juliarc.write('ENV["PYTHON"] = "%s"\n' % spec["python"].prefix)
+ juliarc.write('\n')
+ # Python's OpenSSL package installer complains:
+ # Error: PREFIX too long: 166 characters, but only 128 allowed
+ # Error: post-link failed for: openssl-1.0.2g-0
+ julia("-e", 'Pkg.add("PyCall"); using PyCall')
+
+ if "+plot" in spec:
+ julia("-e", 'Pkg.add("PyPlot"); using PyPlot')
+ julia("-e", 'Pkg.add("Colors"); using Colors')
+ # These may require gtk and image-magick
+ julia("-e", 'Pkg.add("Plots"); using Plots')
+ julia("-e", 'Pkg.add("PlotRecipes"); using PlotRecipes')
+ julia("-e", 'Pkg.add("UnicodePlots"); using UnicodePlots')
+ julia("-e", """\
+using Plots
+using UnicodePlots
+unicodeplots()
+plot(x->sin(x)*cos(x), linspace(0, 2pi))
+""")
+
+ julia("-e", 'Pkg.status()')
diff --git a/var/spack/repos/builtin/packages/kbproto/package.py b/var/spack/repos/builtin/packages/kbproto/package.py
new file mode 100644
index 0000000000..356d1c6b4f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/kbproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Kbproto(Package):
+ """X Keyboard Extension.
+
+ This extension defines a protocol to provide a number of new capabilities
+ and controls for text keyboards."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/kbproto"
+ url = "https://www.x.org/archive/individual/proto/kbproto-1.0.7.tar.gz"
+
+ version('1.0.7', '19acc5f02ae80381e216f443134e0bbb')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/kdiff3/package.py b/var/spack/repos/builtin/packages/kdiff3/package.py
new file mode 100644
index 0000000000..48f4b9c379
--- /dev/null
+++ b/var/spack/repos/builtin/packages/kdiff3/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Kdiff3(Package):
+ """Compare and merge 2 or 3 files or directories."""
+ homepage = "http://kdiff3.sourceforge.net/"
+ url = "https://downloads.sourceforge.net/project/kdiff3/kdiff3/0.9.98/kdiff3-0.9.98.tar.gz"
+
+ version('0.9.98', 'b52f99f2cf2ea75ed5719315cbf77446')
+
+ depends_on("qt@5.2.0:")
+
+ def install(self, spec, prefix):
+ # the configure script runs make internally
+ configure('qt4')
+
+ # there is no 'make install' target, so install the binary manually
+ mkdirp(self.prefix.bin)
+ install(join_path(self.stage.source_path, 'releaseQt', 'kdiff3'),
+ self.prefix.bin)
diff --git a/var/spack/repos/builtin/packages/kealib/package.py b/var/spack/repos/builtin/packages/kealib/package.py
index 7c73c4518b..5346fc8cb9 100644
--- a/var/spack/repos/builtin/packages/kealib/package.py
+++ b/var/spack/repos/builtin/packages/kealib/package.py
@@ -24,20 +24,21 @@
##############################################################################
from spack import *
+
class Kealib(Package):
"""An HDF5 Based Raster File Format
-
+
KEALib provides an implementation of the GDAL data model.
The format supports raster attribute tables, image pyramids,
meta-data and in-built statistics while also handling very
large files and compression throughout.
-
+
Based on the HDF5 standard, it also provides a base from which
other formats can be derived and is a good choice for long
term data archiving. An independent software library (libkea)
provides complete access to the KEA image format and a GDAL
driver allowing KEA images to be used from any GDAL supported software.
-
+
Development work on this project has been funded by Landcare Research.
"""
homepage = "http://kealib.org/"
@@ -51,7 +52,8 @@ class Kealib(Package):
with working_dir('trunk', create=False):
cmake_args = []
cmake_args.append("-DCMAKE_INSTALL_PREFIX=%s" % prefix)
- cmake_args.append("-DHDF5_INCLUDE_DIR=%s" % spec['hdf5'].prefix.include)
+ cmake_args.append("-DHDF5_INCLUDE_DIR=%s" %
+ spec['hdf5'].prefix.include)
cmake_args.append("-DHDF5_LIB_PATH=%s" % spec['hdf5'].prefix.lib)
cmake('.', *cmake_args)
diff --git a/var/spack/repos/builtin/packages/kripke/package.py b/var/spack/repos/builtin/packages/kripke/package.py
index d405e9a51d..cf8d2b7e39 100644
--- a/var/spack/repos/builtin/packages/kripke/package.py
+++ b/var/spack/repos/builtin/packages/kripke/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Kripke(Package):
"""Kripke is a simple, scalable, 3D Sn deterministic particle
transport proxy/mini app.
diff --git a/var/spack/repos/builtin/packages/launchmon/package.py b/var/spack/repos/builtin/packages/launchmon/package.py
index f38bc38202..c2b289da4f 100644
--- a/var/spack/repos/builtin/packages/launchmon/package.py
+++ b/var/spack/repos/builtin/packages/launchmon/package.py
@@ -24,28 +24,20 @@
##############################################################################
from spack import *
+
class Launchmon(Package):
"""Software infrastructure that enables HPC run-time tools to
co-locate tool daemons with a parallel job."""
- homepage = "http://sourceforge.net/projects/launchmon"
- url = "http://downloads.sourceforge.net/project/launchmon/launchmon/1.0.1%20release/launchmon-1.0.1.tar.gz"
-
- version('1.0.1', '2f12465803409fd07f91174a4389eb2b')
- version('1.0.1-2', git='https://github.com/llnl/launchmon.git', commit='ff7e22424b8f375318951eb1c9282fcbbfa8aadf')
-
- depends_on('autoconf')
- depends_on('automake')
- depends_on('libtool')
-
+ homepage = "https://github.com/LLNL/LaunchMON"
+ url = "https://github.com/LLNL/LaunchMON/releases/download/v1.0.2/launchmon-v1.0.2.tar.gz"
- def patch(self):
- # This patch makes libgcrypt compile correctly with newer gcc versions.
- mf = FileFilter('tools/libgcrypt/tests/Makefile.in')
- mf.filter(r'(basic_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
- mf.filter(r'(tsexp_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
- mf.filter(r'(keygen_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
- mf.filter(r'(benchmark_LDADD\s*=\s*.*)', r'\1 -lgpg-error')
+ version('1.0.2', '8d6ba77a0ec2eff2fde2c5cc8fa7ff7a')
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('libgcrypt')
+ depends_on('libgpg-error')
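+ # Note: the in-tree libgcrypt patch was dropped in favor of these
+ # external libgcrypt/libgpg-error dependencies.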
def install(self, spec, prefix):
configure(
diff --git a/var/spack/repos/builtin/packages/lbxproxy/package.py b/var/spack/repos/builtin/packages/lbxproxy/package.py
new file mode 100644
index 0000000000..3de3ade0c9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lbxproxy/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Lbxproxy(Package):
+ """lbxproxy accepts client connections, multiplexes them over a single
+ connection to the X server, and performs various optimizations on the
+ X protocol to make it faster over low bandwidth and/or high latency
+ connections.
+
+ Note that the X server source from X.Org no longer supports the LBX
+ extension, so this program is only useful in connecting to older
+ X servers."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/lbxproxy"
+ url = "https://www.x.org/archive/individual/app/lbxproxy-1.0.3.tar.gz"
+
+ version('1.0.3', '50a2a1ae15e8edf7582f76bcdf6b8197')
+
+ depends_on('libxext')
+ depends_on('liblbxutil')
+ depends_on('libx11')
+ depends_on('libice')
+
+ depends_on('xtrans', type='build')
+ depends_on('xproxymanagementprotocol', type='build')
+ depends_on('bigreqsproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/lcms/package.py b/var/spack/repos/builtin/packages/lcms/package.py
index 434d8e6c98..4d3fc59568 100644
--- a/var/spack/repos/builtin/packages/lcms/package.py
+++ b/var/spack/repos/builtin/packages/lcms/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Lcms(Package):
"""Little cms is a color management library. Implements fast
transforms between ICC profiles. It is focused on speed, and is
diff --git a/var/spack/repos/builtin/packages/leveldb/package.py b/var/spack/repos/builtin/packages/leveldb/package.py
index 408f1d31c1..f571baa1ce 100644
--- a/var/spack/repos/builtin/packages/leveldb/package.py
+++ b/var/spack/repos/builtin/packages/leveldb/package.py
@@ -22,10 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
import glob
from spack import *
+
class Leveldb(Package):
"""LevelDB is a fast key-value storage library written at Google
that provides an ordered mapping from string keys to string values."""
diff --git a/var/spack/repos/builtin/packages/libaio/package.py b/var/spack/repos/builtin/packages/libaio/package.py
new file mode 100644
index 0000000000..735ced047b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libaio/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libaio(Package):
+ """This is the linux native Asynchronous I/O interface library."""
+
+ homepage = "https://git.fedorahosted.org/cgit/libaio.git"
+ url = "https://git.fedorahosted.org/cgit/libaio.git/snapshot/libaio-0.3.110-1.tar.gz"
+
+ version('0.3.110-1', 'eb6b1b435afadb5b80c5dd80984249f6')
+
+ def install(self, spec, prefix):
+ # libaio is not supported on OS X
+ if spec.satisfies('arch=darwin-x86_64'):
+ # create a dummy directory
+ mkdir(prefix.lib)
+ return
+
+ make('prefix={0}'.format(prefix), 'install')
diff --git a/var/spack/repos/builtin/packages/libapplewm/package.py b/var/spack/repos/builtin/packages/libapplewm/package.py
new file mode 100644
index 0000000000..85408053ad
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libapplewm/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libapplewm(Package):
+ """AppleWM is a simple library designed to interface with the Apple-WM
+ extension. This extension allows X window managers to better interact with
+ the Mac OS X Aqua user interface when running X11 in a rootless mode."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libAppleWM"
+ url = "https://www.x.org/archive/individual/lib/libAppleWM-1.4.1.tar.gz"
+
+ version('1.4.1', '52c587641eb57f00978d28d98d487af8')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('applewmproto@1.4:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ # Crashes with this error message on Linux:
+ # HIServices/Processes.h: No such file or directory
+ # May only build properly on macOS?
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libarchive/package.py b/var/spack/repos/builtin/packages/libarchive/package.py
index f11d732afa..e439bf894f 100644
--- a/var/spack/repos/builtin/packages/libarchive/package.py
+++ b/var/spack/repos/builtin/packages/libarchive/package.py
@@ -24,17 +24,34 @@
##############################################################################
from spack import *
+
class Libarchive(Package):
"""libarchive: C library and command-line tools for reading and
writing tar, cpio, zip, ISO, and other archive formats."""
+
homepage = "http://www.libarchive.org"
url = "http://www.libarchive.org/downloads/libarchive-3.1.2.tar.gz"
+ version('3.2.1', 'afa257047d1941a565216edbf0171e72')
version('3.1.2', 'efad5a503f66329bb9d2f4308b5de98a')
version('3.1.1', '1f3d883daf7161a0065e42a15bbf168f')
version('3.1.0', '095a287bb1fd687ab50c85955692bf3a')
+ depends_on('zlib')
+ depends_on('bzip2')
+ depends_on('lzma')
+ depends_on('lz4')
+ depends_on('xz')
+ depends_on('lzo')
+ depends_on('nettle')
+ depends_on('openssl')
+ depends_on('libxml2')
+ depends_on('expat')
+
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
+
make()
- make("install")
+ if self.run_tests:
+ make('check') # cannot build test suite with Intel compilers
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libatomic-ops/package.py b/var/spack/repos/builtin/packages/libatomic-ops/package.py
new file mode 100644
index 0000000000..0167fbcb33
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libatomic-ops/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class LibatomicOps(Package):
+ """This package provides semi-portable access to hardware-provided
+ atomic memory update operations on a number of architectures."""
+
+ homepage = "https://github.com/ivmai/libatomic_ops"
+ url = "http://www.hboehm.info/gc/gc_source/libatomic_ops-7.4.4.tar.gz"
+
+ version('7.4.4', '426d804baae12c372967a6d183e25af2')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix),
+ '--enable-shared')
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libcerf/package.py b/var/spack/repos/builtin/packages/libcerf/package.py
index b30d76f4e9..1964f03b95 100644
--- a/var/spack/repos/builtin/packages/libcerf/package.py
+++ b/var/spack/repos/builtin/packages/libcerf/package.py
@@ -27,9 +27,10 @@ from spack import *
class Libcerf(Package):
- """
- A self-contained C library providing complex error functions, based on Faddeeva's plasma dispersion function
- w(z). Also provides Dawson's integral and Voigt's convolution of a Gaussian and a Lorentzian
+ """A self-contained C library providing complex error functions, based
+ on Faddeeva's plasma dispersion function w(z). Also provides Dawson's
+ integral and Voigt's convolution of a Gaussian and a Lorentzian.
+
"""
homepage = "http://sourceforge.net/projects/libcerf"
url = "http://downloads.sourceforge.net/project/libcerf/libcerf-1.3.tgz"
@@ -37,6 +38,12 @@ class Libcerf(Package):
version('1.3', 'b3504c467204df71e62aeccf73a25612')
def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
+ options = []
+ # Clang reports unused functions as errors, see
+ # http://clang.debian.net/status.php?version=3.8.1&key=UNUSED_FUNCTION
+ if spec.satisfies('%clang'):
+ options.append('CFLAGS=-Wno-unused-function')
+
+ configure('--prefix=%s' % prefix, *options)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/libcircle/package.py b/var/spack/repos/builtin/packages/libcircle/package.py
index 75fdb96125..971c29f5f1 100644
--- a/var/spack/repos/builtin/packages/libcircle/package.py
+++ b/var/spack/repos/builtin/packages/libcircle/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Libcircle(Package):
"""libcircle provides an efficient distributed queue on a cluster,
using self-stabilizing work stealing."""
@@ -32,7 +32,7 @@ class Libcircle(Package):
homepage = "https://github.com/hpc/libcircle"
version('0.2.1-rc.1', '2b1369a5736457239f908abf88143ec2',
- url='https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz')
+ url='https://github.com/hpc/libcircle/releases/download/0.2.1-rc.1/libcircle-0.2.1-rc.1.tar.gz')
depends_on('mpi')
diff --git a/var/spack/repos/builtin/packages/libctl/package.py b/var/spack/repos/builtin/packages/libctl/package.py
new file mode 100644
index 0000000000..53d30ce5c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libctl/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libctl(Package):
+ """libctl is a free Guile-based library implementing flexible
+ control files for scientific simulations."""
+
+ homepage = "http://ab-initio.mit.edu/wiki/index.php/Libctl"
+ url = "http://ab-initio.mit.edu/libctl/libctl-3.2.2.tar.gz"
+
+ version('3.2.2', '5fd7634dc9ae8e7fa70a68473b9cbb68')
+
+ depends_on('guile')
+
+ def install(self, spec, prefix):
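+ # Point configure at Spack's guile rather than whatever is in PATH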
+ configure('--prefix={0}'.format(prefix),
+ '--enable-shared',
+ 'GUILE={0}'.format(join_path(
+ spec['guile'].prefix.bin, 'guile')),
+ 'GUILE_CONFIG={0}'.format(join_path(
+ spec['guile'].prefix.bin, 'guile-config')))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libdmx/package.py b/var/spack/repos/builtin/packages/libdmx/package.py
new file mode 100644
index 0000000000..fa469fd423
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libdmx/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libdmx(Package):
+ """libdmx - X Window System DMX (Distributed Multihead X) extension
+ library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libdmx"
+ url = "https://www.x.org/archive/individual/lib/libdmx-1.1.3.tar.gz"
+
+ version('1.1.3', 'eed755e7cdb161e05f70e955f2b0ef4d')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('dmxproto@2.2.99.1:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libdrm/package.py b/var/spack/repos/builtin/packages/libdrm/package.py
index b05412e588..ab9661269e 100644
--- a/var/spack/repos/builtin/packages/libdrm/package.py
+++ b/var/spack/repos/builtin/packages/libdrm/package.py
@@ -24,21 +24,27 @@
##############################################################################
from spack import *
+
class Libdrm(Package):
"""A userspace library for accessing the DRM, direct
rendering manager, on Linux, BSD and other operating
systems that support the ioctl interface."""
- homepage = "http://dri.freedesktop.org/libdrm/" # no real website...
+ homepage = "http://dri.freedesktop.org/libdrm/"
url = "http://dri.freedesktop.org/libdrm/libdrm-2.4.59.tar.gz"
+ version('2.4.70', 'a8c275bce5f3d71a5ca25e8fb60df084')
version('2.4.59', '105ac7af1afcd742d402ca7b4eb168b6')
version('2.4.33', '86e4e3debe7087d5404461e0032231c8')
- depends_on('libpciaccess')
+ depends_on('libpciaccess@0.10:')
+ depends_on('libpthread-stubs')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix),
+ '--enable-static',
+ 'LIBS=-lrt') # This fixes a bug with `make check`
make()
- make("install")
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libdwarf/package.py b/var/spack/repos/builtin/packages/libdwarf/package.py
index 3f5a72116e..594271f655 100644
--- a/var/spack/repos/builtin/packages/libdwarf/package.py
+++ b/var/spack/repos/builtin/packages/libdwarf/package.py
@@ -23,11 +23,11 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
# Only build certain parts of dwarf because the other ones break.
dwarf_dirs = ['libdwarf', 'dwarfdump2']
+
class Libdwarf(Package):
"""The DWARF Debugging Information Format is of interest to
programmers working on compilers and debuggers (and any one
@@ -41,18 +41,17 @@ class Libdwarf(Package):
MIPS/IRIX C compiler."""
homepage = "http://www.prevanders.net/dwarf.html"
- url = "http://www.prevanders.net/libdwarf-20130729.tar.gz"
+ url = "http://www.prevanders.net/libdwarf-20160507.tar.gz"
list_url = homepage
+ version('20160507', 'ae32d6f9ece5daf05e2d4b14822ea811')
version('20130729', '4cc5e48693f7b93b7aa0261e63c0e21d')
version('20130207', '64b42692e947d5180e162e46c689dfbf')
version('20130126', 'ded74a5e90edb5a12aac3c29d260c5db')
-
depends_on("libelf")
parallel = False
-
def install(self, spec, prefix):
# dwarf build does not set arguments for ar properly
make.add_default_arg('ARFLAGS=rcs')
@@ -69,7 +68,11 @@ class Libdwarf(Package):
install('libdwarf.h', prefix.include)
install('dwarf.h', prefix.include)
- with working_dir('dwarfdump2'):
+ if spec.satisfies('@20130126:20130729'):
+ dwarfdump_dir = 'dwarfdump2'
+ else:
+ dwarfdump_dir = 'dwarfdump'
+ with working_dir(dwarfdump_dir):
configure("--prefix=" + prefix)
# This makefile has strings of copy commands that
diff --git a/var/spack/repos/builtin/packages/libedit/package.py b/var/spack/repos/builtin/packages/libedit/package.py
index 4366344679..235e7648bc 100644
--- a/var/spack/repos/builtin/packages/libedit/package.py
+++ b/var/spack/repos/builtin/packages/libedit/package.py
@@ -24,12 +24,14 @@
##############################################################################
from spack import *
+
class Libedit(Package):
"""An autotools compatible port of the NetBSD editline library"""
homepage = "http://thrysoee.dk/editline/"
url = "http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz"
- version('3.1', '43cdb5df3061d78b5e9d59109871b4f6', url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz")
+ version('3.1', '43cdb5df3061d78b5e9d59109871b4f6',
+ url="http://thrysoee.dk/editline/libedit-20150325-3.1.tar.gz")
depends_on('ncurses')
diff --git a/var/spack/repos/builtin/packages/libelf/package.py b/var/spack/repos/builtin/packages/libelf/package.py
index 1e1deea818..5af4ab705d 100644
--- a/var/spack/repos/builtin/packages/libelf/package.py
+++ b/var/spack/repos/builtin/packages/libelf/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class Libelf(Package):
+
+class Libelf(AutotoolsPackage):
"""libelf lets you read, modify or create ELF object files in an
architecture-independent way. The library takes care of size
and endian issues, e.g. you can process a file for SPARC
@@ -38,12 +39,11 @@ class Libelf(Package):
provides('elf')
- def install(self, spec, prefix):
- configure("--prefix=" + prefix,
- "--enable-shared",
- "--disable-dependency-tracking",
- "--disable-debug")
- make()
+ def configure_args(self):
+ args = ["--enable-shared",
+ "--disable-dependency-tracking",
+ "--disable-debug"]
+ return args
- # The mkdir commands in libelf's install can fail in parallel
- make("install", parallel=False)
+ def install(self, spec, prefix):
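+ # The mkdir commands in libelf's install can fail in parallel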
+ make('install', parallel=False)
diff --git a/var/spack/repos/builtin/packages/libemos/package.py b/var/spack/repos/builtin/packages/libemos/package.py
new file mode 100644
index 0000000000..0275c7ede9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libemos/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libemos(Package):
+ """The Interpolation library (EMOSLIB) includes Interpolation software and
+ BUFR & CREX encoding/decoding routines."""
+
+ homepage = "https://software.ecmwf.int/wiki/display/EMOS/Emoslib"
+ url = "https://software.ecmwf.int/wiki/download/attachments/3473472/libemos-4.4.2-Source.tar.gz"
+
+ version('4.4.2', 'f15a9aff0f40861f3f046c9088197376')
+
+ depends_on('cmake', type='build')
+ depends_on('grib-api')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+
+ options.append('-DGRIB_API_PATH=%s' % spec['grib-api'].prefix)
+
+ # To support long pathnames that spack generates
+ options.append('-DCMAKE_Fortran_FLAGS=-ffree-line-length-none')
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libepoxy/package.py b/var/spack/repos/builtin/packages/libepoxy/package.py
new file mode 100644
index 0000000000..364ea1e30c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libepoxy/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libepoxy(Package):
+ """Epoxy is a library for handling OpenGL function pointer management for
+ you."""
+ homepage = "https://github.com/anholt/libepoxy"
+ url = "https://github.com/anholt/libepoxy/releases/download/v1.3.1/libepoxy-1.3.1.tar.bz2"
+
+ version('1.3.1', '96f6620a9b005a503e7b44b0b528287d')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libevent/package.py b/var/spack/repos/builtin/packages/libevent/package.py
index 3ae427a2eb..65b3a716c0 100644
--- a/var/spack/repos/builtin/packages/libevent/package.py
+++ b/var/spack/repos/builtin/packages/libevent/package.py
@@ -24,11 +24,13 @@
##############################################################################
from spack import *
+
class Libevent(Package):
"""The libevent API provides a mechanism to execute a callback function
- when a specific event occurs on a file descriptor or after a timeout has been
- reached. Furthermore, libevent also support callbacks due to signals or regular
- timeouts.
+ when a specific event occurs on a file descriptor or after a
+ timeout has been reached. Furthermore, libevent also supports
+ callbacks due to signals or regular timeouts.
+
"""
homepage = "http://libevent.org"
@@ -46,7 +48,8 @@ class Libevent(Package):
version('2.0.13', 'af786b4b3f790c9d3279792edf7867fc')
version('2.0.12', '42986228baf95e325778ed328a93e070')
- variant('openssl', default=True, description="Build with encryption enabled at the libevent level.")
+ variant('openssl', default=True,
+ description="Build with encryption enabled at the libevent level.")
depends_on('openssl', when='+openssl')
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/libffi/package.py b/var/spack/repos/builtin/packages/libffi/package.py
index 1d3c85c8e0..ddb7709ddf 100644
--- a/var/spack/repos/builtin/packages/libffi/package.py
+++ b/var/spack/repos/builtin/packages/libffi/package.py
@@ -24,18 +24,16 @@
##############################################################################
from spack import *
-class Libffi(Package):
+
+class Libffi(AutotoolsPackage):
"""The libffi library provides a portable, high level programming
interface to various calling conventions. This allows a programmer
to call any function specified by a call interface description at
run time."""
homepage = "https://sourceware.org/libffi/"
-
- version('3.2.1','83b89587607e3eb65c70d361f13bab43',url = "ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz")
- #version('3.1', 'f5898b29bbfd70502831a212d9249d10',url = "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug $(lib64) instead of ${lib64} in libffi.pc
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
+ version('3.2.1', '83b89587607e3eb65c70d361f13bab43',
+ url="ftp://sourceware.org/pub/libffi/libffi-3.2.1.tar.gz")
+ # version('3.1', 'f5898b29bbfd70502831a212d9249d10',url =
+ # "ftp://sourceware.org/pub/libffi/libffi-3.1.tar.gz") # Has a bug
+ # $(lib64) instead of ${lib64} in libffi.pc
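+
+ # Note (editorial assumption based on Spack's AutotoolsPackage base class):
+ # the default configure, build and install phases apply, so the old
+ # install() method is no longer needed.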
diff --git a/var/spack/repos/builtin/packages/libfontenc/package.py b/var/spack/repos/builtin/packages/libfontenc/package.py
new file mode 100644
index 0000000000..1c7fa196d9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libfontenc/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libfontenc(Package):
+ """libfontenc - font encoding library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libfontenc"
+ url = "https://www.x.org/archive/individual/lib/libfontenc-1.1.3.tar.gz"
+
+ version('1.1.3', '0ffa28542aa7d246299b1f7211cdb768')
+
+ depends_on('zlib')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libfs/package.py b/var/spack/repos/builtin/packages/libfs/package.py
new file mode 100644
index 0000000000..96bf62afd7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libfs/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libfs(Package):
+ """libFS - X Font Service client library.
+
+ This library is used by clients of X Font Servers (xfs), such as
+ xfsinfo, fslsfonts, and the X servers themselves."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libFS"
+ url = "https://www.x.org/archive/individual/lib/libFS-1.0.7.tar.gz"
+
+ version('1.0.7', 'd8c1246f5b3d0e7ccf2190d3bf2ecb73')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('fontsproto', type='build')
+ depends_on('xtrans', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libgcrypt/package.py b/var/spack/repos/builtin/packages/libgcrypt/package.py
index b1a316cc1b..b556def4d3 100644
--- a/var/spack/repos/builtin/packages/libgcrypt/package.py
+++ b/var/spack/repos/builtin/packages/libgcrypt/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libgcrypt(Package):
"""Libgcrypt is a general purpose cryptographic library based on
the code from GnuPG. It provides functions for all cryptographic
diff --git a/var/spack/repos/builtin/packages/libgd/package.py b/var/spack/repos/builtin/packages/libgd/package.py
index 5e4e420842..6329adf8f3 100644
--- a/var/spack/repos/builtin/packages/libgd/package.py
+++ b/var/spack/repos/builtin/packages/libgd/package.py
@@ -27,27 +27,46 @@ from spack import *
class Libgd(Package):
- """
- GD is an open source code library for the dynamic creation of images by programmers. GD is written in C, and
- "wrappers" are available for Perl, PHP and other languages. GD creates PNG, JPEG, GIF, WebP, XPM, BMP images,
- among other formats. GD is commonly used to generate charts, graphics, thumbnails, and most anything else, on the
- fly. While not restricted to use on the web, the most common applications of GD involve website development.
+ """GD is an open source code library for the dynamic creation of images
+ by programmers. GD is written in C, and "wrappers" are available
+ for Perl, PHP and other languages. GD creates PNG, JPEG, GIF,
+ WebP, XPM, BMP images, among other formats. GD is commonly used to
+ generate charts, graphics, thumbnails, and most anything else, on
+ the fly. While not restricted to use on the web, the most common
+ applications of GD involve website development.
+
"""
homepage = "https://github.com/libgd/libgd"
url = "https://github.com/libgd/libgd/archive/gd-2.1.1.tar.gz"
+ version('2.2.3', 'a67bd15fa33d4aac0a1c7904aed19f49')
version('2.1.1', 'e91a1a99903e460e7ba00a794e72cc1e')
+ # Build dependencies
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('m4', type='build')
+ depends_on('gettext', type='build')
+ depends_on('pkg-config', type='build')
+
depends_on('libpng')
+ depends_on('libtiff')
+ depends_on('fontconfig')
def install(self, spec, prefix):
-
- with working_dir('spack-build', create=True):
- cmake('..',
- '-DENABLE_JPEG:BOOL=ON',
- '-DENABLE_PNG:BOOL=ON',
- '-DENABLE_TIFF:BOOL=ON',
- *std_cmake_args)
- make()
- make("install")
+ autoreconf("--install", "--force",
+ "-I", "m4",
+ "-I", join_path(spec['gettext'].prefix,
+ "share", "aclocal"),
+ "-I", join_path(spec['pkg-config'].prefix,
+ "share", "aclocal"),
+ "-I", join_path(spec['automake'].prefix,
+ "share", "aclocal"),
+ "-I", join_path(spec['libtool'].prefix,
+ "share", "aclocal")
+ )
+ configure('--prefix={0}'.format(prefix))
+ make()
+ make("install")
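
A sketch for context (not part of the commit; package name, URL, and checksum are invented): the libgd change drops the in-tree CMake build and regenerates the autotools scripts instead. The recurring pattern is to declare the autotools chain as build-only dependencies and point autoreconf at each tool's aclocal directory, roughly:

    from spack import *


    class Libqux(Package):
        """Hypothetical package that regenerates its autotools scripts."""

        homepage = "http://example.com/libqux"
        url = "http://example.com/libqux-1.0.tar.gz"

        version('1.0', '11111111111111111111111111111111')

        # Build-only tools: needed to generate configure, never linked against.
        depends_on('autoconf', type='build')
        depends_on('automake', type='build')
        depends_on('libtool', type='build')

        def install(self, spec, prefix):
            # Tell aclocal where each build dependency installed its macros.
            autoreconf('--install', '--force',
                       '-I', join_path(spec['automake'].prefix,
                                       'share', 'aclocal'),
                       '-I', join_path(spec['libtool'].prefix,
                                       'share', 'aclocal'))
            configure('--prefix={0}'.format(prefix))
            make()
            make('install')
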
diff --git a/var/spack/repos/builtin/packages/libgpg-error/package.py b/var/spack/repos/builtin/packages/libgpg-error/package.py
index 3fe82a69e7..a0e2acd516 100644
--- a/var/spack/repos/builtin/packages/libgpg-error/package.py
+++ b/var/spack/repos/builtin/packages/libgpg-error/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class LibgpgError(Package):
"""Libgpg-error is a small library that defines common error
values for all GnuPG components. Among these are GPG, GPGSM,
diff --git a/var/spack/repos/builtin/packages/libgtextutils/package.py b/var/spack/repos/builtin/packages/libgtextutils/package.py
new file mode 100644
index 0000000000..201e031869
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libgtextutils/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libgtextutils(Package):
+ """Gordon's Text utils Library."""
+
+ homepage = "https://github.com/agordon/libgtextutils"
+ url = "https://github.com/agordon/libgtextutils/releases/download/0.7/libgtextutils-0.7.tar.gz"
+
+ version('0.7', '593c7c62e3c76ec49f5736eed4f96806')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libhio/package.py b/var/spack/repos/builtin/packages/libhio/package.py
new file mode 100644
index 0000000000..17bd86d310
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libhio/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libhio(Package):
+ """
+ A library for writing to hierarchical data store systems.
+ """
+
+ homepage = "https://github.com/hpc/libhio/"
+ url = "https://github.com/hpc/libhio/releases/download/hio.1.3.0.1/libhio-1.3.0.1.tar.gz"
+
+ version('1.3.0.1', 'c073541de8dd70aeb8878bd00d6d877f')
+
+ depends_on("libjson-c")
+ depends_on("bzip2")
+ depends_on("pkg-config", type="build")
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/libice/package.py b/var/spack/repos/builtin/packages/libice/package.py
new file mode 100644
index 0000000000..1f6fd2f901
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libice/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libice(Package):
+ """libICE - Inter-Client Exchange Library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libICE"
+ url = "https://www.x.org/archive/individual/lib/libICE-1.0.9.tar.gz"
+
+ version('1.0.9', '95812d61df8139c7cacc1325a26d5e37')
+
+ depends_on('xproto', type='build')
+ depends_on('xtrans', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libiconv/gets.patch b/var/spack/repos/builtin/packages/libiconv/gets.patch
new file mode 100644
index 0000000000..7c4b5562ec
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libiconv/gets.patch
@@ -0,0 +1,13 @@
+--- a/srclib/stdio.in.h
++++ b/srclib/stdio.in.h
+@@ -692,10 +692,6 @@
+ # undef gets
+ # endif
+ _GL_CXXALIASWARN (gets);
+-/* It is very rare that the developer ever has full control of stdin,
+- so any use of gets warrants an unconditional warning. Assume it is
+- always declared, since it is required by C89. */
+-_GL_WARN_ON_USE (gets, "gets is a security hole - use fgets instead");
+ #endif
+
+
diff --git a/var/spack/repos/builtin/packages/libiconv/package.py b/var/spack/repos/builtin/packages/libiconv/package.py
new file mode 100644
index 0000000000..72f67ec80d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libiconv/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import shutil
+
+
+class Libiconv(AutotoolsPackage):
+ """GNU libiconv provides an implementation of the iconv() function
+ and the iconv program for character set conversion."""
+
+ homepage = "https://www.gnu.org/software/libiconv/"
+ url = "http://ftp.gnu.org/pub/gnu/libiconv/libiconv-1.14.tar.gz"
+
+ version('1.14', 'e34509b1623cec449dfeb73d7ce9c6c6')
+
+ # We cannot set up a warning for gets(), since gets() is not part
+ # of C11 any more and thus might not exist.
+ patch("gets.patch")
+
+ def configure_args(self):
+ args = ['--enable-extra-encodings']
+
+ # A hack to patch config.guess in the libcharset sub directory
+ shutil.copyfile('./build-aux/config.guess',
+ 'libcharset/build-aux/config.guess')
+ return args
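
A sketch for context (not from the commit; the class name, URL, and checksum are invented, and the flag is borrowed from the libiconv recipe above): with AutotoolsPackage, per-package configure options live in configure_args(), and the returned list is appended to the generated './configure --prefix=...' call.

    from spack import *


    class Libquux(AutotoolsPackage):
        """Hypothetical recipe showing configure_args()."""

        homepage = "http://example.com/libquux"
        url = "http://example.com/libquux-1.0.tar.gz"

        version('1.0', '22222222222222222222222222222222')

        def configure_args(self):
            # '--prefix' is added automatically; only extra flags go here.
            return ['--enable-extra-encodings']
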
diff --git a/var/spack/repos/builtin/packages/libint/package.py b/var/spack/repos/builtin/packages/libint/package.py
new file mode 100644
index 0000000000..2ad5e93191
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libint/package.py
@@ -0,0 +1,104 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libint(Package):
+ """Libint is a high-performance library for computing
+ Gaussian integrals in quantum mechanics."""
+
+ homepage = "https://github.com/evaleev/libint"
+ url = "https://github.com/evaleev/libint/archive/v2.1.0.tar.gz"
+
+ version('2.1.0', 'd0dcb985fe32ddebc78fe571ce37e2d6')
+ version('1.1.6', '990f67b55f49ecc18f32c58da9240684')
+ version('1.1.5', '379b7d0718ff398715d6898807adf628')
+
+ # Build dependencies
+ depends_on('autoconf@2.52:', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+
+ # Libint 2 dependencies
+ depends_on('boost', when='@2:')
+ depends_on('gmp', when='@2:')
+
+ def url_for_version(self, version):
+ base_url = "https://github.com/evaleev/libint/archive"
+ if version == Version('1.0.0'):
+ return "{0}/LIBINT_1_00.tar.gz".format(base_url)
+ elif version < Version('2.1.0'):
+ return "{0}/release-{1}.tar.gz".format(base_url, version.dashed)
+ else:
+ return "{0}/v{1}.tar.gz".format(base_url, version)
+
+ def install(self, spec, prefix):
+ # Generate configure
+ libtoolize()
+ aclocal('-I', 'lib/autoconf')
+ autoconf()
+
+ config_args = [
+ '--prefix={0}'.format(prefix),
+ '--enable-shared'
+ ]
+
+ # Optimizations for the Intel compiler, suggested by CP2K
+ optflags = '-O2'
+ if self.compiler.name == 'intel':
+ optflags += ' -xAVX -axCORE-AVX2 -ipo'
+ if which('xiar'):
+ env['AR'] = 'xiar'
+
+ env['CFLAGS'] = optflags
+ env['CXXFLAGS'] = optflags
+
+ # Optimization flag names have changed in libint 2
+ if self.version < Version('2.0.0'):
+ config_args.extend([
+ '--with-cc-optflags={0}'.format(optflags),
+ '--with-cxx-optflags={0}'.format(optflags)
+ ])
+ else:
+ config_args.extend([
+ '--with-cxx-optflags={0}'.format(optflags),
+ '--with-cxxgen-optflags={0}'.format(optflags)
+ ])
+
+ # Options required by CP2K, removed in libint 2
+ if self.version < Version('2.0.0'):
+ config_args.extend([
+ '--with-libint-max-am=5',
+ '--with-libderiv-max-am1=4'
+ ])
+
+ configure(*config_args)
+ make()
+
+ # Testing suite was added in libint 2
+ if self.version >= Version('2.0.0'):
+ make('check')
+
+ make('install')
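
A sketch for context (not part of the commit; names, URLs, and checksums are invented): when upstream renamed its tarballs between releases, url_for_version() maps each version onto the right download URL, which is what the libint recipe above does. Stripped down:

    from spack import *


    class Libcorge(Package):
        """Hypothetical package whose download URL scheme changed over time."""

        homepage = "http://example.com/libcorge"

        version('2.1.0', '33333333333333333333333333333333')
        version('1.1.6', '44444444444444444444444444444444')

        def url_for_version(self, version):
            base = "http://example.com/releases"
            if version < Version('2.0.0'):
                # Old releases were published as 'release-x-y-z.tar.gz'.
                return "{0}/release-{1}.tar.gz".format(base, version.dashed)
            return "{0}/v{1}.tar.gz".format(base, version)

        def install(self, spec, prefix):
            configure('--prefix={0}'.format(prefix))
            make()
            make('install')
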
diff --git a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
index 8615b2ed83..3fe159d7b9 100644
--- a/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
+++ b/var/spack/repos/builtin/packages/libjpeg-turbo/package.py
@@ -24,21 +24,27 @@
##############################################################################
from spack import *
+
class LibjpegTurbo(Package):
- """libjpeg-turbo is a fork of the original IJG libjpeg which uses
- SIMD to accelerate baseline JPEG compression and
- decompression. libjpeg is a library that implements JPEG image
- encoding, decoding and transcoding."""
+ """libjpeg-turbo is a fork of the original IJG libjpeg which uses SIMD to
+ accelerate baseline JPEG compression and decompression. libjpeg is a
+ library that implements JPEG image encoding, decoding and
+ transcoding."""
+
homepage = "http://libjpeg-turbo.virtualgl.org"
url = "http://downloads.sourceforge.net/libjpeg-turbo/libjpeg-turbo-1.3.1.tar.gz"
+ version('1.5.0', '3fc5d9b6a8bce96161659ae7a9939257')
version('1.3.1', '2c3a68129dac443a72815ff5bb374b05')
- # Can use either of these.
- depends_on("yasm")
- depends_on("nasm")
+ # Either of these can be used, but in the current version of the package
+ # only nasm is used. To use yasm instead, the NASM environment
+ # variable must be set.
+ # TODO: Implement the selection between two supported assemblers.
+ # depends_on("yasm", type='build')
+ depends_on("nasm", type='build')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure("--prefix=" + prefix)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/libjson-c/package.py b/var/spack/repos/builtin/packages/libjson-c/package.py
index 75f3e130ad..561c8ab71f 100644
--- a/var/spack/repos/builtin/packages/libjson-c/package.py
+++ b/var/spack/repos/builtin/packages/libjson-c/package.py
@@ -24,15 +24,12 @@
##############################################################################
from spack import *
-class LibjsonC(Package):
+
+class LibjsonC(AutotoolsPackage):
""" A JSON implementation in C """
homepage = "https://github.com/json-c/json-c/wiki"
url = "https://s3.amazonaws.com/json-c_releases/releases/json-c-0.11.tar.gz"
- version('0.11', 'aa02367d2f7a830bf1e3376f77881e98')
-
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
+ parallel = False
- make()
- make("install")
+ version('0.11', 'aa02367d2f7a830bf1e3376f77881e98')
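
A note with a sketch (not from the commit; name, URL, and checksum are invented): the parallel = False attribute added to LibjsonC makes Spack build the package serially, the usual workaround for makefiles whose dependency rules are not safe under 'make -jN'.

    from spack import *


    class Libgrault(AutotoolsPackage):
        """Hypothetical package with a race-prone makefile."""

        homepage = "http://example.com/libgrault"
        url = "http://example.com/libgrault-1.0.tar.gz"

        # Serialize the build, equivalent to always running 'make -j1'.
        parallel = False

        version('1.0', '55555555555555555555555555555555')
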
diff --git a/var/spack/repos/builtin/packages/liblbxutil/package.py b/var/spack/repos/builtin/packages/liblbxutil/package.py
new file mode 100644
index 0000000000..1de59defd6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/liblbxutil/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Liblbxutil(Package):
+ """liblbxutil - Low Bandwidth X extension (LBX) utility routines."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/liblbxutil"
+ url = "https://www.x.org/archive/individual/lib/liblbxutil-1.1.0.tar.gz"
+
+ version('1.1.0', '2735cd23625d4cc870ec4eb7ca272788')
+
+ depends_on('xextproto@7.0.99.1:', type='build')
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ # There is a bug in the library that causes the following messages:
+ # undefined symbol: Xfree
+ # undefined symbol: Xalloc
+ # See https://bugs.freedesktop.org/show_bug.cgi?id=8421
+ # Adding a dependency on libxdmcp and adding LIBS=-lXdmcp did not fix it
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libmesh/package.py b/var/spack/repos/builtin/packages/libmesh/package.py
new file mode 100644
index 0000000000..6ceef8dbf6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libmesh/package.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libmesh(Package):
+ """The libMesh library provides a framework for the numerical simulation of
+ partial differential equations using arbitrary unstructured
+ discretizations on serial and parallel platforms."""
+
+ homepage = "http://libmesh.github.io/"
+ url = "https://github.com/libMesh/libmesh/releases/download/v1.0.0/libmesh-1.0.0.tar.bz2"
+
+ version('1.0.0', 'cb464fc63ea0b71b1e69fa3f5d4f93a4')
+
+ variant('mpi', default=True, description='Enables MPI parallelism')
+
+ depends_on('mpi', when='+mpi')
+
+ # Parallel version of libmesh needs parallel solvers
+ depends_on('petsc+mpi', when='+mpi')
+
+ def install(self, spec, prefix):
+ config_args = ["--prefix=%s" % prefix]
+
+ if '+mpi' in spec:
+ config_args.append('CC=%s' % spec['mpi'].mpicc)
+ config_args.append('CXX=%s' % spec['mpi'].mpicxx)
+ config_args.append('PETSC_DIR=%s' % spec['petsc'].prefix)
+
+ configure(*config_args)
+
+ make()
+ make('install')
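
A sketch for context (not part of the commit; names, URL, and checksum are invented): the libmesh recipe shows the common pattern of handing the MPI implementation's compiler wrappers to configure when the +mpi variant is active. Reduced to its core:

    from spack import *


    class Libgarply(Package):
        """Hypothetical package with an optional MPI build."""

        homepage = "http://example.com/libgarply"
        url = "http://example.com/libgarply-1.0.tar.gz"

        version('1.0', '66666666666666666666666666666666')

        variant('mpi', default=True, description='Enables MPI parallelism')
        depends_on('mpi', when='+mpi')

        def install(self, spec, prefix):
            args = ['--prefix={0}'.format(prefix)]
            if '+mpi' in spec:
                # Build with the MPI vendor's compiler wrappers.
                args.append('CC={0}'.format(spec['mpi'].mpicc))
                args.append('CXX={0}'.format(spec['mpi'].mpicxx))
            configure(*args)
            make()
            make('install')
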
diff --git a/var/spack/repos/builtin/packages/libmng/package.py b/var/spack/repos/builtin/packages/libmng/package.py
index dd0903c4bd..a77aada79c 100644
--- a/var/spack/repos/builtin/packages/libmng/package.py
+++ b/var/spack/repos/builtin/packages/libmng/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libmng(Package):
"""libmng -THE reference library for reading, displaying, writing
and examining Multiple-Image Network Graphics. MNG is the animation
@@ -39,7 +40,8 @@ class Libmng(Package):
def patch(self):
    # jpeg requires stdio to be included before its headers.
- filter_file(r'^(\#include \<jpeglib\.h\>)', '#include<stdio.h>\n\\1', 'libmng_types.h')
+ filter_file(r'^(\#include \<jpeglib\.h\>)',
+ '#include<stdio.h>\n\\1', 'libmng_types.h')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/libmonitor/package.py b/var/spack/repos/builtin/packages/libmonitor/package.py
index 883d8af405..f680baa265 100644
--- a/var/spack/repos/builtin/packages/libmonitor/package.py
+++ b/var/spack/repos/builtin/packages/libmonitor/package.py
@@ -24,19 +24,19 @@
##############################################################################
from spack import *
+
class Libmonitor(Package):
"""Libmonitor is a library for process and thread control."""
- homepage = "http://hpctoolkit.org"
-
- version('20130218', svn='http://libmonitor.googlecode.com/svn/trunk/', revision=146)
- variant('krellpatch', default=False, description="build with openspeedshop based patch.")
-
+ homepage = "https://github.com/HPCToolkit/libmonitor"
+ version('20130218', git='https://github.com/HPCToolkit/libmonitor.git',
+ commit='4f2311e')
+ variant('krellpatch', default=False,
+ description="build with openspeedshop based patch.")
patch('libmonitorkrell-0000.patch', when='@20130218+krellpatch')
patch('libmonitorkrell-0001.patch', when='@20130218+krellpatch')
patch('libmonitorkrell-0002.patch', when='@20130218+krellpatch')
-
def install(self, spec, prefix):
configure("--prefix=" + prefix)
make()
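
A sketch for context (not from the commit; the repository URL and commit hash are placeholders): libmonitor's fetch location moves from the old Google Code subversion URL to a git repository pinned to a specific commit, which keeps the '20130218' version reproducible in a way a branch name would not.

    from spack import *


    class Libxyzzy(Package):
        """Hypothetical package fetched from git instead of a tarball."""

        homepage = "https://example.com/libxyzzy"

        # Pin an exact commit so this version always builds the same sources;
        # a branch reference would be a moving target.
        version('20130218', git='https://example.com/libxyzzy.git',
                commit='abc1234')

        def install(self, spec, prefix):
            configure('--prefix=' + prefix)
            make()
            make('install')
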
diff --git a/var/spack/repos/builtin/packages/libNBC/package.py b/var/spack/repos/builtin/packages/libnbc/package.py
index ed1d0ce96f..414498a37a 100644
--- a/var/spack/repos/builtin/packages/libNBC/package.py
+++ b/var/spack/repos/builtin/packages/libnbc/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libnbc(Package):
"""LibNBC is a prototypic implementation of a nonblocking
interface for MPI collective operations. Based on ANSI C and
diff --git a/var/spack/repos/builtin/packages/liboldx/package.py b/var/spack/repos/builtin/packages/liboldx/package.py
new file mode 100644
index 0000000000..9e85f1ed65
--- /dev/null
+++ b/var/spack/repos/builtin/packages/liboldx/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Liboldx(Package):
+ """X version 10 backwards compatibility."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/liboldX/"
+ url = "https://www.x.org/archive/individual/lib/liboldX-1.0.1.tar.gz"
+
+ version('1.0.1', 'ea7c4b6a19bf2d04100e2580abf83fae')
+
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libpciaccess/package.py b/var/spack/repos/builtin/packages/libpciaccess/package.py
index 5d1e93eab7..f4a3cf7157 100644
--- a/var/spack/repos/builtin/packages/libpciaccess/package.py
+++ b/var/spack/repos/builtin/packages/libpciaccess/package.py
@@ -23,25 +23,28 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os.path
+
class Libpciaccess(Package):
"""Generic PCI access library."""
homepage = "http://cgit.freedesktop.org/xorg/lib/libpciaccess/"
- url = "http://xorg.freedesktop.org/archive/individual/lib/libpciaccess-0.13.4.tar.bz2"
+ url = "http://xorg.freedesktop.org/archive/individual/lib/libpciaccess-0.13.4.tar.gz"
- version('0.13.4', 'ace78aec799b1cf6dfaea55d3879ed9f')
+ version('0.13.4', 'cc1fad87da60682af1d5fa43a5da45a4')
- depends_on('libtool')
+ depends_on('libtool', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
def install(self, spec, prefix):
# libpciaccess does not support OS X
- if spec.satisfies('=darwin-x86_64'):
+ if spec.satisfies('platform=darwin'):
# create a dummy directory
mkdir(prefix.lib)
return
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
+
make()
- make("install")
+ make('install')
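
A sketch for context (not part of the commit; name, URL, and checksum are invented): the old '=darwin-x86_64' test is replaced with spec.satisfies('platform=darwin'), which matches macOS regardless of the target architecture. The guard-and-bail pattern in isolation:

    from spack import *


    class Libthud(Package):
        """Hypothetical package that cannot be built on macOS."""

        homepage = "http://example.com/libthud"
        url = "http://example.com/libthud-1.0.tar.gz"

        version('1.0', '77777777777777777777777777777777')

        def install(self, spec, prefix):
            if spec.satisfies('platform=darwin'):
                # Unsupported platform: leave an empty prefix and stop.
                mkdir(prefix.lib)
                return

            configure('--prefix={0}'.format(prefix))
            make()
            make('install')
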
diff --git a/var/spack/repos/builtin/packages/libpng/package.py b/var/spack/repos/builtin/packages/libpng/package.py
index 951b91eabd..46bbcebb89 100644
--- a/var/spack/repos/builtin/packages/libpng/package.py
+++ b/var/spack/repos/builtin/packages/libpng/package.py
@@ -24,21 +24,27 @@
##############################################################################
from spack import *
-class Libpng(Package):
- """libpng graphics file format"""
+
+class Libpng(AutotoolsPackage):
+ """libpng is the official PNG reference library."""
+
homepage = "http://www.libpng.org/pub/png/libpng.html"
- url = "http://download.sourceforge.net/libpng/libpng-1.6.16.tar.gz"
+ url = "http://download.sourceforge.net/libpng/libpng-1.6.27.tar.gz"
- version('1.6.16', '1a4ad377919ab15b54f6cb6a3ae2622d')
- version('1.6.15', '829a256f3de9307731d4f52dc071916d')
- version('1.6.14', '2101b3de1d5f348925990f9aa8405660')
- version('1.5.26', '3ca98347a5541a2dad55cd6d07ee60a9')
- version('1.4.19', '89bcbc4fc8b31f4a403906cf4f662330')
- version('1.2.56', '9508fc59d10a1ffadd9aae35116c19ee')
+ version('1.6.27', '58698519e9f6126c1caeefc28dbcbd5f')
+ # From http://www.libpng.org/pub/png/libpng.html (2017-01-04)
+ # Virtually all libpng versions through 1.6.26, 1.5.27,
+ # 1.4.19, 1.2.56, and 1.0.66, respectively, have a
+ # null-pointer-dereference bug in png_set_text_2() when an
+ # image-editing application adds, removes, and re-adds text
+ # chunks to a PNG image. (This bug does not affect pure
+ # viewers, nor are there any known editors that could trigger
+ # it without interactive user input. It has been assigned ID
+ # CVE-2016-10087.) The vulnerability is fixed in versions
+ # 1.6.27, 1.5.28, 1.4.20, 1.2.57, and 1.0.67, released on 29
+ # December 2016.
- depends_on('zlib')
+ # Required for qt@3
+ version('1.2.57', 'dfcda3603e29dcc11870c48f838ef75b')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
+ depends_on('zlib@1.0.4:') # 1.2.5 or later recommended
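
A sketch for context (not from the commit; the package, URL, and checksum are invented): 'zlib@1.0.4:' uses Spack's version-range syntax, where a trailing colon means that version or anything newer, a leading colon means anything up to it, and 'a:b' is an inclusive range.

    from spack import *


    class Libwaldo(Package):
        """Hypothetical package, shown only for the version-range syntax."""

        homepage = "http://example.com/libwaldo"
        url = "http://example.com/libwaldo-1.0.tar.gz"

        version('1.0', '88888888888888888888888888888888')

        depends_on('zlib@1.0.4:')      # zlib 1.0.4 or any newer version
        depends_on('libpng@1.2:1.6')   # anything from the 1.2 to 1.6 releases

        def install(self, spec, prefix):
            configure('--prefix={0}'.format(prefix))
            make()
            make('install')
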
diff --git a/var/spack/repos/builtin/packages/libpthread-stubs/package.py b/var/spack/repos/builtin/packages/libpthread-stubs/package.py
new file mode 100644
index 0000000000..fdaf327c2a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libpthread-stubs/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class LibpthreadStubs(Package):
+ """The libpthread-stubs package provides weak aliases for pthread
+ functions not provided in libc or otherwise available by default."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/libpthread-stubs-0.3.tar.gz"
+
+ version('0.3', 'a09d928c4af54fe5436002345ef71138')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libquo/package.py b/var/spack/repos/builtin/packages/libquo/package.py
new file mode 100644
index 0000000000..8856913bb2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libquo/package.py
@@ -0,0 +1,68 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Libquo(Package):
+ """A high-level, easy to use programming interface tailored specifically
+ for MPI/MPI+X codes that may benefit from evolving process binding
+ policies during their execution. QUO allows for arbitrary process binding
+ policies to be enacted and reverted during the execution of an MPI/MPI+X
+ application as different computational phases are entered and exited,
+ respectively."""
+
+ homepage = "https://github.com/losalamos/libquo"
+ url = "https://github.com/losalamos/libquo/archive/v1.2.9.tar.gz"
+
+ version('1.2.9', 'ca82ab33f13e2b89983f81e7c02e98c2')
+
+ depends_on('mpi')
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+
+ def install(self, spec, prefix):
+ autoreconf_options = [
+ '--install',
+ '--verbose',
+ '--force',
+ '-I', 'config',
+ '-I', os.path.join(spec['automake'].prefix,
+ 'share', 'aclocal'),
+ '-I', os.path.join(spec['libtool'].prefix,
+ 'share', 'aclocal')
+ ]
+ autoreconf(*autoreconf_options)
+
+ configure_options = [
+ '--prefix={0}'.format(prefix),
+ 'CC=%s' % join_path(spec['mpi'].prefix.bin, "mpicc"),
+ 'FC=%s' % join_path(spec['mpi'].prefix.bin, "mpif90")
+ ]
+ configure(*configure_options)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libsigsegv/package.py b/var/spack/repos/builtin/packages/libsigsegv/package.py
index 14acdcbcd2..a5a3e8eb5f 100644
--- a/var/spack/repos/builtin/packages/libsigsegv/package.py
+++ b/var/spack/repos/builtin/packages/libsigsegv/package.py
@@ -24,16 +24,16 @@
##############################################################################
from spack import *
-class Libsigsegv(Package):
+
+class Libsigsegv(AutotoolsPackage):
"""GNU libsigsegv is a library for handling page faults in user mode."""
+
homepage = "https://www.gnu.org/software/libsigsegv/"
url = "ftp://ftp.gnu.org/gnu/libsigsegv/libsigsegv-2.10.tar.gz"
- version('2.10', '7f96fb1f65b3b8cbc1582fb7be774f0f')
+ patch('patch.new_config_guess', when='@2.10')
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix,
- '--enable-shared')
+ version('2.10', '7f96fb1f65b3b8cbc1582fb7be774f0f')
- make()
- make("install")
+ def configure_args(self):
+ return ['--enable-shared']
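
A sketch for context (not part of the commit; names, URL, checksums, and the patch file name are invented): patch() applies a file shipped next to package.py to the unpacked sources, and a when= constraint limits it to the releases that need it, as with the patch.new_config_guess file that libsigsegv gains in the following hunk.

    from spack import *


    class Libfred(AutotoolsPackage):
        """Hypothetical package carrying a version-specific patch."""

        homepage = "http://example.com/libfred"
        url = "http://example.com/libfred-2.11.tar.gz"

        version('2.11', '99999999999999999999999999999999')
        version('2.10', 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')

        # 'fix-config-guess.patch' sits beside package.py and is applied
        # only when the 2.10 release is built.
        patch('fix-config-guess.patch', when='@2.10')

        def configure_args(self):
            return ['--enable-shared']
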
diff --git a/var/spack/repos/builtin/packages/libsigsegv/patch.new_config_guess b/var/spack/repos/builtin/packages/libsigsegv/patch.new_config_guess
new file mode 100644
index 0000000000..3ecc7458f7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libsigsegv/patch.new_config_guess
@@ -0,0 +1,1187 @@
+--- a/build-aux/config.guess 2010-11-06 04:18:40.000000000 -0700
++++ b/build-aux/config.guess 2016-10-18 13:38:39.950379000 -0700
+@@ -1,14 +1,12 @@
+ #! /bin/sh
+ # Attempt to guess a canonical system name.
+-# Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
+-# 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
+-# Free Software Foundation, Inc.
++# Copyright 1992-2016 Free Software Foundation, Inc.
+
+-timestamp='2010-09-24'
++timestamp='2016-10-02'
+
+ # This file is free software; you can redistribute it and/or modify it
+ # under the terms of the GNU General Public License as published by
+-# the Free Software Foundation; either version 2 of the License, or
++# the Free Software Foundation; either version 3 of the License, or
+ # (at your option) any later version.
+ #
+ # This program is distributed in the hope that it will be useful, but
+@@ -17,26 +15,22 @@
+ # General Public License for more details.
+ #
+ # You should have received a copy of the GNU General Public License
+-# along with this program; if not, write to the Free Software
+-# Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA
+-# 02110-1301, USA.
++# along with this program; if not, see <http://www.gnu.org/licenses/>.
+ #
+ # As a special exception to the GNU General Public License, if you
+ # distribute this file as part of a program that contains a
+ # configuration script generated by Autoconf, you may include it under
+-# the same distribution terms that you use for the rest of that program.
+-
+-
+-# Originally written by Per Bothner. Please send patches (context
+-# diff format) to <config-patches@gnu.org> and include a ChangeLog
+-# entry.
++# the same distribution terms that you use for the rest of that
++# program. This Exception is an additional permission under section 7
++# of the GNU General Public License, version 3 ("GPLv3").
+ #
+-# This script attempts to guess a canonical system name similar to
+-# config.sub. If it succeeds, it prints the system name on stdout, and
+-# exits with 0. Otherwise, it exits with 1.
++# Originally written by Per Bothner; maintained since 2000 by Ben Elliston.
+ #
+ # You can get the latest version of this script from:
+-# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
++# http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess
++#
++# Please send patches to <config-patches@gnu.org>.
++
+
+ me=`echo "$0" | sed -e 's,.*/,,'`
+
+@@ -56,9 +50,7 @@
+ GNU config.guess ($timestamp)
+
+ Originally written by Per Bothner.
+-Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000,
+-2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free
+-Software Foundation, Inc.
++Copyright 1992-2016 Free Software Foundation, Inc.
+
+ This is free software; see the source for copying conditions. There is NO
+ warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE."
+@@ -92,7 +84,7 @@
+ exit 1
+ fi
+
+-trap 'exit 1' HUP INT TERM
++trap 'exit 1' 1 2 15
+
+ # CC_FOR_BUILD -- compiler used by this script. Note that the use of a
+ # compiler to aid in system detection is discouraged as it requires
+@@ -106,7 +98,7 @@
+
+ set_cc_for_build='
+ trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ;
+-trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" HUP INT PIPE TERM ;
++trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ;
+ : ${TMPDIR=/tmp} ;
+ { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } ||
+ { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } ||
+@@ -140,12 +132,33 @@
+ UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown
+ UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown
+
++case "${UNAME_SYSTEM}" in
++Linux|GNU|GNU/*)
++ # If the system lacks a compiler, then just pick glibc.
++ # We could probably try harder.
++ LIBC=gnu
++
++ eval $set_cc_for_build
++ cat <<-EOF > $dummy.c
++ #include <features.h>
++ #if defined(__UCLIBC__)
++ LIBC=uclibc
++ #elif defined(__dietlibc__)
++ LIBC=dietlibc
++ #else
++ LIBC=gnu
++ #endif
++ EOF
++ eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC' | sed 's, ,,g'`
++ ;;
++esac
++
+ # Note: order is significant - the case branches are not exclusive.
+
+ case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in
+ *:NetBSD:*:*)
+ # NetBSD (nbsd) targets should (where applicable) match one or
+- # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*,
++ # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*,
+ # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently
+ # switched to ELF, *-*-netbsd* would select the old
+ # object file format. This provides both forward
+@@ -155,19 +168,29 @@
+ # Note: NetBSD doesn't particularly care about the vendor
+ # portion of the name. We always set it to "unknown".
+ sysctl="sysctl -n hw.machine_arch"
+- UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \
+- /usr/sbin/$sysctl 2>/dev/null || echo unknown)`
++ UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \
++ /sbin/$sysctl 2>/dev/null || \
++ /usr/sbin/$sysctl 2>/dev/null || \
++ echo unknown)`
+ case "${UNAME_MACHINE_ARCH}" in
+ armeb) machine=armeb-unknown ;;
+ arm*) machine=arm-unknown ;;
+ sh3el) machine=shl-unknown ;;
+ sh3eb) machine=sh-unknown ;;
+ sh5el) machine=sh5le-unknown ;;
++ earmv*)
++ arch=`echo ${UNAME_MACHINE_ARCH} | sed -e 's,^e\(armv[0-9]\).*$,\1,'`
++ endian=`echo ${UNAME_MACHINE_ARCH} | sed -ne 's,^.*\(eb\)$,\1,p'`
++ machine=${arch}${endian}-unknown
++ ;;
+ *) machine=${UNAME_MACHINE_ARCH}-unknown ;;
+ esac
+ # The Operating System including object format, if it has switched
+- # to ELF recently, or will in the future.
++ # to ELF recently (or will in the future) and ABI.
+ case "${UNAME_MACHINE_ARCH}" in
++ earm*)
++ os=netbsdelf
++ ;;
+ arm*|i386|m68k|ns32k|sh3*|sparc|vax)
+ eval $set_cc_for_build
+ if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \
+@@ -181,7 +204,14 @@
+ fi
+ ;;
+ *)
+- os=netbsd
++ os=netbsd
++ ;;
++ esac
++ # Determine ABI tags.
++ case "${UNAME_MACHINE_ARCH}" in
++ earm*)
++ expr='s/^earmv[0-9]/-eabi/;s/eb$//'
++ abi=`echo ${UNAME_MACHINE_ARCH} | sed -e "$expr"`
+ ;;
+ esac
+ # The OS release
+@@ -194,18 +224,26 @@
+ release='-gnu'
+ ;;
+ *)
+- release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'`
++ release=`echo ${UNAME_RELEASE} | sed -e 's/[-_].*//' | cut -d. -f1,2`
+ ;;
+ esac
+ # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM:
+ # contains redundant information, the shorter form:
+ # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used.
+- echo "${machine}-${os}${release}"
++ echo "${machine}-${os}${release}${abi}"
++ exit ;;
++ *:Bitrig:*:*)
++ UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'`
++ echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE}
+ exit ;;
+ *:OpenBSD:*:*)
+ UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'`
+ echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE}
+ exit ;;
++ *:LibertyBSD:*:*)
++ UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'`
++ echo ${UNAME_MACHINE_ARCH}-unknown-libertybsd${UNAME_RELEASE}
++ exit ;;
+ *:ekkoBSD:*:*)
+ echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE}
+ exit ;;
+@@ -218,13 +256,16 @@
+ *:MirBSD:*:*)
+ echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE}
+ exit ;;
++ *:Sortix:*:*)
++ echo ${UNAME_MACHINE}-unknown-sortix
++ exit ;;
+ alpha:OSF1:*:*)
+ case $UNAME_RELEASE in
+ *4.0)
+ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'`
+ ;;
+ *5.*)
+- UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
++ UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'`
+ ;;
+ esac
+ # According to Compaq, /usr/sbin/psrinfo has been available on
+@@ -234,43 +275,46 @@
+ ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1`
+ case "$ALPHA_CPU_TYPE" in
+ "EV4 (21064)")
+- UNAME_MACHINE="alpha" ;;
++ UNAME_MACHINE=alpha ;;
+ "EV4.5 (21064)")
+- UNAME_MACHINE="alpha" ;;
++ UNAME_MACHINE=alpha ;;
+ "LCA4 (21066/21068)")
+- UNAME_MACHINE="alpha" ;;
++ UNAME_MACHINE=alpha ;;
+ "EV5 (21164)")
+- UNAME_MACHINE="alphaev5" ;;
++ UNAME_MACHINE=alphaev5 ;;
+ "EV5.6 (21164A)")
+- UNAME_MACHINE="alphaev56" ;;
++ UNAME_MACHINE=alphaev56 ;;
+ "EV5.6 (21164PC)")
+- UNAME_MACHINE="alphapca56" ;;
++ UNAME_MACHINE=alphapca56 ;;
+ "EV5.7 (21164PC)")
+- UNAME_MACHINE="alphapca57" ;;
++ UNAME_MACHINE=alphapca57 ;;
+ "EV6 (21264)")
+- UNAME_MACHINE="alphaev6" ;;
++ UNAME_MACHINE=alphaev6 ;;
+ "EV6.7 (21264A)")
+- UNAME_MACHINE="alphaev67" ;;
++ UNAME_MACHINE=alphaev67 ;;
+ "EV6.8CB (21264C)")
+- UNAME_MACHINE="alphaev68" ;;
++ UNAME_MACHINE=alphaev68 ;;
+ "EV6.8AL (21264B)")
+- UNAME_MACHINE="alphaev68" ;;
++ UNAME_MACHINE=alphaev68 ;;
+ "EV6.8CX (21264D)")
+- UNAME_MACHINE="alphaev68" ;;
++ UNAME_MACHINE=alphaev68 ;;
+ "EV6.9A (21264/EV69A)")
+- UNAME_MACHINE="alphaev69" ;;
++ UNAME_MACHINE=alphaev69 ;;
+ "EV7 (21364)")
+- UNAME_MACHINE="alphaev7" ;;
++ UNAME_MACHINE=alphaev7 ;;
+ "EV7.9 (21364A)")
+- UNAME_MACHINE="alphaev79" ;;
++ UNAME_MACHINE=alphaev79 ;;
+ esac
+ # A Pn.n version is a patched version.
+ # A Vn.n version is a released version.
+ # A Tn.n version is a released field test version.
+ # A Xn.n version is an unreleased experimental baselevel.
+ # 1.2 uses "1.2" for uname -r.
+- echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+- exit ;;
++ echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`
++ # Reset EXIT trap before exiting to avoid spurious non-zero exit code.
++ exitcode=$?
++ trap '' 0
++ exit $exitcode ;;
+ Alpha\ *:Windows_NT*:*)
+ # How do we know it's Interix rather than the generic POSIX subsystem?
+ # Should we change UNAME_MACHINE based on the output of uname instead
+@@ -296,12 +340,12 @@
+ echo s390-ibm-zvmoe
+ exit ;;
+ *:OS400:*:*)
+- echo powerpc-ibm-os400
++ echo powerpc-ibm-os400
+ exit ;;
+ arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*)
+ echo arm-acorn-riscix${UNAME_RELEASE}
+ exit ;;
+- arm:riscos:*:*|arm:RISCOS:*:*)
++ arm*:riscos:*:*|arm*:RISCOS:*:*)
+ echo arm-unknown-riscos
+ exit ;;
+ SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*)
+@@ -339,16 +383,16 @@
+ exit ;;
+ i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*)
+ eval $set_cc_for_build
+- SUN_ARCH="i386"
++ SUN_ARCH=i386
+ # If there is a compiler, see if it is configured for 64-bit objects.
+ # Note that the Sun cc does not turn __LP64__ into 1 like gcc does.
+ # This test works for both compilers.
+- if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
++ if [ "$CC_FOR_BUILD" != no_compiler_found ]; then
+ if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \
+- (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
++ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
+ grep IS_64BIT_ARCH >/dev/null
+ then
+- SUN_ARCH="x86_64"
++ SUN_ARCH=x86_64
+ fi
+ fi
+ echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'`
+@@ -373,7 +417,7 @@
+ exit ;;
+ sun*:*:4.2BSD:*)
+ UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null`
+- test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3
++ test "x${UNAME_RELEASE}" = x && UNAME_RELEASE=3
+ case "`/bin/arch`" in
+ sun3)
+ echo m68k-sun-sunos${UNAME_RELEASE}
+@@ -395,23 +439,23 @@
+ # MiNT. But MiNT is downward compatible to TOS, so this should
+ # be no problem.
+ atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*)
+- echo m68k-atari-mint${UNAME_RELEASE}
++ echo m68k-atari-mint${UNAME_RELEASE}
+ exit ;;
+ atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*)
+ echo m68k-atari-mint${UNAME_RELEASE}
+- exit ;;
++ exit ;;
+ *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*)
+- echo m68k-atari-mint${UNAME_RELEASE}
++ echo m68k-atari-mint${UNAME_RELEASE}
+ exit ;;
+ milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*)
+- echo m68k-milan-mint${UNAME_RELEASE}
+- exit ;;
++ echo m68k-milan-mint${UNAME_RELEASE}
++ exit ;;
+ hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*)
+- echo m68k-hades-mint${UNAME_RELEASE}
+- exit ;;
++ echo m68k-hades-mint${UNAME_RELEASE}
++ exit ;;
+ *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*)
+- echo m68k-unknown-mint${UNAME_RELEASE}
+- exit ;;
++ echo m68k-unknown-mint${UNAME_RELEASE}
++ exit ;;
+ m68k:machten:*:*)
+ echo m68k-apple-machten${UNAME_RELEASE}
+ exit ;;
+@@ -481,8 +525,8 @@
+ echo m88k-motorola-sysv3
+ exit ;;
+ AViiON:dgux:*:*)
+- # DG/UX returns AViiON for all architectures
+- UNAME_PROCESSOR=`/usr/bin/uname -p`
++ # DG/UX returns AViiON for all architectures
++ UNAME_PROCESSOR=`/usr/bin/uname -p`
+ if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ]
+ then
+ if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \
+@@ -495,7 +539,7 @@
+ else
+ echo i586-dg-dgux${UNAME_RELEASE}
+ fi
+- exit ;;
++ exit ;;
+ M88*:DolphinOS:*:*) # DolphinOS (SVR3)
+ echo m88k-dolphin-sysv3
+ exit ;;
+@@ -559,8 +603,9 @@
+ else
+ IBM_ARCH=powerpc
+ fi
+- if [ -x /usr/bin/oslevel ] ; then
+- IBM_REV=`/usr/bin/oslevel`
++ if [ -x /usr/bin/lslpp ] ; then
++ IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc |
++ awk -F: '{ print $3 }' | sed s/[0-9]*$/0/`
+ else
+ IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE}
+ fi
+@@ -595,58 +640,58 @@
+ 9000/[678][0-9][0-9])
+ if [ -x /usr/bin/getconf ]; then
+ sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null`
+- sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
+- case "${sc_cpu_version}" in
+- 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0
+- 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1
+- 532) # CPU_PA_RISC2_0
+- case "${sc_kernel_bits}" in
+- 32) HP_ARCH="hppa2.0n" ;;
+- 64) HP_ARCH="hppa2.0w" ;;
+- '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20
+- esac ;;
+- esac
++ sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null`
++ case "${sc_cpu_version}" in
++ 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0
++ 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1
++ 532) # CPU_PA_RISC2_0
++ case "${sc_kernel_bits}" in
++ 32) HP_ARCH=hppa2.0n ;;
++ 64) HP_ARCH=hppa2.0w ;;
++ '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20
++ esac ;;
++ esac
+ fi
+ if [ "${HP_ARCH}" = "" ]; then
+ eval $set_cc_for_build
+- sed 's/^ //' << EOF >$dummy.c
++ sed 's/^ //' << EOF >$dummy.c
+
+- #define _HPUX_SOURCE
+- #include <stdlib.h>
+- #include <unistd.h>
+-
+- int main ()
+- {
+- #if defined(_SC_KERNEL_BITS)
+- long bits = sysconf(_SC_KERNEL_BITS);
+- #endif
+- long cpu = sysconf (_SC_CPU_VERSION);
+-
+- switch (cpu)
+- {
+- case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
+- case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
+- case CPU_PA_RISC2_0:
+- #if defined(_SC_KERNEL_BITS)
+- switch (bits)
+- {
+- case 64: puts ("hppa2.0w"); break;
+- case 32: puts ("hppa2.0n"); break;
+- default: puts ("hppa2.0"); break;
+- } break;
+- #else /* !defined(_SC_KERNEL_BITS) */
+- puts ("hppa2.0"); break;
+- #endif
+- default: puts ("hppa1.0"); break;
+- }
+- exit (0);
+- }
++ #define _HPUX_SOURCE
++ #include <stdlib.h>
++ #include <unistd.h>
++
++ int main ()
++ {
++ #if defined(_SC_KERNEL_BITS)
++ long bits = sysconf(_SC_KERNEL_BITS);
++ #endif
++ long cpu = sysconf (_SC_CPU_VERSION);
++
++ switch (cpu)
++ {
++ case CPU_PA_RISC1_0: puts ("hppa1.0"); break;
++ case CPU_PA_RISC1_1: puts ("hppa1.1"); break;
++ case CPU_PA_RISC2_0:
++ #if defined(_SC_KERNEL_BITS)
++ switch (bits)
++ {
++ case 64: puts ("hppa2.0w"); break;
++ case 32: puts ("hppa2.0n"); break;
++ default: puts ("hppa2.0"); break;
++ } break;
++ #else /* !defined(_SC_KERNEL_BITS) */
++ puts ("hppa2.0"); break;
++ #endif
++ default: puts ("hppa1.0"); break;
++ }
++ exit (0);
++ }
+ EOF
+- (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
++ (CCOPTS="" $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy`
+ test -z "$HP_ARCH" && HP_ARCH=hppa
+ fi ;;
+ esac
+- if [ ${HP_ARCH} = "hppa2.0w" ]
++ if [ ${HP_ARCH} = hppa2.0w ]
+ then
+ eval $set_cc_for_build
+
+@@ -659,12 +704,12 @@
+ # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess
+ # => hppa64-hp-hpux11.23
+
+- if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) |
++ if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) |
+ grep -q __LP64__
+ then
+- HP_ARCH="hppa2.0w"
++ HP_ARCH=hppa2.0w
+ else
+- HP_ARCH="hppa64"
++ HP_ARCH=hppa64
+ fi
+ fi
+ echo ${HP_ARCH}-hp-hpux${HPUX_REV}
+@@ -731,22 +776,22 @@
+ exit ;;
+ C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*)
+ echo c1-convex-bsd
+- exit ;;
++ exit ;;
+ C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*)
+ if getsysinfo -f scalar_acc
+ then echo c32-convex-bsd
+ else echo c2-convex-bsd
+ fi
+- exit ;;
++ exit ;;
+ C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*)
+ echo c34-convex-bsd
+- exit ;;
++ exit ;;
+ C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*)
+ echo c38-convex-bsd
+- exit ;;
++ exit ;;
+ C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*)
+ echo c4-convex-bsd
+- exit ;;
++ exit ;;
+ CRAY*Y-MP:*:*:*)
+ echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+@@ -769,15 +814,15 @@
+ echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/'
+ exit ;;
+ F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*)
+- FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'`
+- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+- FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
+- echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+- exit ;;
++ FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz`
++ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
++ FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'`
++ echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
++ exit ;;
+ 5000:UNIX_System_V:4.*:*)
+- FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'`
+- FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'`
+- echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
++ FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'`
++ FUJITSU_REL=`echo ${UNAME_RELEASE} | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'`
++ echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}"
+ exit ;;
+ i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*)
+ echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE}
+@@ -789,30 +834,35 @@
+ echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE}
+ exit ;;
+ *:FreeBSD:*:*)
+- case ${UNAME_MACHINE} in
+- pc98)
+- echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
++ UNAME_PROCESSOR=`/usr/bin/uname -p`
++ case ${UNAME_PROCESSOR} in
+ amd64)
+ echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+ *)
+- echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
++ echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;;
+ esac
+ exit ;;
+ i*:CYGWIN*:*)
+ echo ${UNAME_MACHINE}-pc-cygwin
+ exit ;;
++ *:MINGW64*:*)
++ echo ${UNAME_MACHINE}-pc-mingw64
++ exit ;;
+ *:MINGW*:*)
+ echo ${UNAME_MACHINE}-pc-mingw32
+ exit ;;
++ *:MSYS*:*)
++ echo ${UNAME_MACHINE}-pc-msys
++ exit ;;
+ i*:windows32*:*)
+- # uname -m includes "-pc" on this system.
+- echo ${UNAME_MACHINE}-mingw32
++ # uname -m includes "-pc" on this system.
++ echo ${UNAME_MACHINE}-mingw32
+ exit ;;
+ i*:PW*:*)
+ echo ${UNAME_MACHINE}-pc-pw32
+ exit ;;
+ *:Interix*:*)
+- case ${UNAME_MACHINE} in
++ case ${UNAME_MACHINE} in
+ x86)
+ echo i586-pc-interix${UNAME_RELEASE}
+ exit ;;
+@@ -849,15 +899,22 @@
+ exit ;;
+ *:GNU:*:*)
+ # the GNU system
+- echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
++ echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'`
+ exit ;;
+ *:GNU/*:*:*)
+ # other systems with GNU libc and userland
+- echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu
++ echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC}
+ exit ;;
+ i*86:Minix:*:*)
+ echo ${UNAME_MACHINE}-pc-minix
+ exit ;;
++ aarch64:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
++ exit ;;
++ aarch64_be:Linux:*:*)
++ UNAME_MACHINE=aarch64_be
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
++ exit ;;
+ alpha:Linux:*:*)
+ case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in
+ EV5) UNAME_MACHINE=alphaev5 ;;
+@@ -867,52 +924,62 @@
+ EV6) UNAME_MACHINE=alphaev6 ;;
+ EV67) UNAME_MACHINE=alphaev67 ;;
+ EV68*) UNAME_MACHINE=alphaev68 ;;
+- esac
++ esac
+ objdump --private-headers /bin/sh | grep -q ld.so.1
+- if test "$?" = 0 ; then LIBC="libc1" ; else LIBC="" ; fi
+- echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC}
++ if test "$?" = 0 ; then LIBC=gnulibc1 ; fi
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
++ exit ;;
++ arc:Linux:*:* | arceb:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ arm*:Linux:*:*)
+ eval $set_cc_for_build
+ if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \
+ | grep -q __ARM_EABI__
+ then
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ else
+- echo ${UNAME_MACHINE}-unknown-linux-gnueabi
++ if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \
++ | grep -q __ARM_PCS_VFP
++ then
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi
++ else
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf
++ fi
+ fi
+ exit ;;
+ avr32*:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ cris:Linux:*:*)
+- echo cris-axis-linux-gnu
++ echo ${UNAME_MACHINE}-axis-linux-${LIBC}
+ exit ;;
+ crisv32:Linux:*:*)
+- echo crisv32-axis-linux-gnu
++ echo ${UNAME_MACHINE}-axis-linux-${LIBC}
++ exit ;;
++ e2k:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ frv:Linux:*:*)
+- echo frv-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
++ exit ;;
++ hexagon:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ i*86:Linux:*:*)
+- LIBC=gnu
+- eval $set_cc_for_build
+- sed 's/^ //' << EOF >$dummy.c
+- #ifdef __dietlibc__
+- LIBC=dietlibc
+- #endif
+-EOF
+- eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'`
+- echo "${UNAME_MACHINE}-pc-linux-${LIBC}"
++ echo ${UNAME_MACHINE}-pc-linux-${LIBC}
+ exit ;;
+ ia64:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
++ exit ;;
++ k1om:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ m32r*:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ m68*:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ mips:Linux:*:* | mips64:Linux:*:*)
+ eval $set_cc_for_build
+@@ -931,54 +998,69 @@
+ #endif
+ EOF
+ eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'`
+- test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; }
++ test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; }
+ ;;
+- or32:Linux:*:*)
+- echo or32-unknown-linux-gnu
++ mips64el:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
++ exit ;;
++ openrisc*:Linux:*:*)
++ echo or1k-unknown-linux-${LIBC}
++ exit ;;
++ or32:Linux:*:* | or1k*:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ padre:Linux:*:*)
+- echo sparc-unknown-linux-gnu
++ echo sparc-unknown-linux-${LIBC}
+ exit ;;
+ parisc64:Linux:*:* | hppa64:Linux:*:*)
+- echo hppa64-unknown-linux-gnu
++ echo hppa64-unknown-linux-${LIBC}
+ exit ;;
+ parisc:Linux:*:* | hppa:Linux:*:*)
+ # Look for CPU level
+ case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in
+- PA7*) echo hppa1.1-unknown-linux-gnu ;;
+- PA8*) echo hppa2.0-unknown-linux-gnu ;;
+- *) echo hppa-unknown-linux-gnu ;;
++ PA7*) echo hppa1.1-unknown-linux-${LIBC} ;;
++ PA8*) echo hppa2.0-unknown-linux-${LIBC} ;;
++ *) echo hppa-unknown-linux-${LIBC} ;;
+ esac
+ exit ;;
+ ppc64:Linux:*:*)
+- echo powerpc64-unknown-linux-gnu
++ echo powerpc64-unknown-linux-${LIBC}
+ exit ;;
+ ppc:Linux:*:*)
+- echo powerpc-unknown-linux-gnu
++ echo powerpc-unknown-linux-${LIBC}
++ exit ;;
++ ppc64le:Linux:*:*)
++ echo powerpc64le-unknown-linux-${LIBC}
++ exit ;;
++ ppcle:Linux:*:*)
++ echo powerpcle-unknown-linux-${LIBC}
++ exit ;;
++ riscv32:Linux:*:* | riscv64:Linux:*:*)
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ s390:Linux:*:* | s390x:Linux:*:*)
+- echo ${UNAME_MACHINE}-ibm-linux
++ echo ${UNAME_MACHINE}-ibm-linux-${LIBC}
+ exit ;;
+ sh64*:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ sh*:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ sparc:Linux:*:* | sparc64:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ tile*:Linux:*:*)
+- echo ${UNAME_MACHINE}-tilera-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ vax:Linux:*:*)
+- echo ${UNAME_MACHINE}-dec-linux-gnu
++ echo ${UNAME_MACHINE}-dec-linux-${LIBC}
+ exit ;;
+ x86_64:Linux:*:*)
+- echo x86_64-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-pc-linux-${LIBC}
+ exit ;;
+ xtensa*:Linux:*:*)
+- echo ${UNAME_MACHINE}-unknown-linux-gnu
++ echo ${UNAME_MACHINE}-unknown-linux-${LIBC}
+ exit ;;
+ i*86:DYNIX/ptx:4*:*)
+ # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there.
+@@ -987,11 +1069,11 @@
+ echo i386-sequent-sysv4
+ exit ;;
+ i*86:UNIX_SV:4.2MP:2.*)
+- # Unixware is an offshoot of SVR4, but it has its own version
+- # number series starting with 2...
+- # I am not positive that other SVR4 systems won't match this,
++ # Unixware is an offshoot of SVR4, but it has its own version
++ # number series starting with 2...
++ # I am not positive that other SVR4 systems won't match this,
+ # I just have to hope. -- rms.
+- # Use sysv4.2uw... so that sysv4* matches it.
++ # Use sysv4.2uw... so that sysv4* matches it.
+ echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION}
+ exit ;;
+ i*86:OS/2:*:*)
+@@ -1023,7 +1105,7 @@
+ fi
+ exit ;;
+ i*86:*:5:[678]*)
+- # UnixWare 7.x, OpenUNIX and OpenServer 6.
++ # UnixWare 7.x, OpenUNIX and OpenServer 6.
+ case `/bin/uname -X | grep "^Machine"` in
+ *486*) UNAME_MACHINE=i486 ;;
+ *Pentium) UNAME_MACHINE=i586 ;;
+@@ -1051,13 +1133,13 @@
+ exit ;;
+ pc:*:*:*)
+ # Left here for compatibility:
+- # uname -m prints for DJGPP always 'pc', but it prints nothing about
+- # the processor, so we play safe by assuming i586.
++ # uname -m prints for DJGPP always 'pc', but it prints nothing about
++ # the processor, so we play safe by assuming i586.
+ # Note: whatever this is, it MUST be the same as what config.sub
+- # prints for the "djgpp" host, or else GDB configury will decide that
++ # prints for the "djgpp" host, or else GDB configure will decide that
+ # this is a cross-build.
+ echo i586-pc-msdosdjgpp
+- exit ;;
++ exit ;;
+ Intel:Mach:3*:*)
+ echo i386-pc-mach3
+ exit ;;
+@@ -1092,8 +1174,8 @@
+ /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \
+ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;;
+ 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*)
+- /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
+- && { echo i486-ncr-sysv4; exit; } ;;
++ /bin/uname -p 2>/dev/null | grep 86 >/dev/null \
++ && { echo i486-ncr-sysv4; exit; } ;;
+ NCR*:*:4.2:* | MPRAS*:*:4.2:*)
+ OS_REL='.3'
+ test -r /etc/.relid \
+@@ -1136,10 +1218,10 @@
+ echo ns32k-sni-sysv
+ fi
+ exit ;;
+- PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
+- # says <Richard.M.Bartel@ccMail.Census.GOV>
+- echo i586-unisys-sysv4
+- exit ;;
++ PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort
++ # says <Richard.M.Bartel@ccMail.Census.GOV>
++ echo i586-unisys-sysv4
++ exit ;;
+ *:UNIX_System_V:4*:FTX*)
+ # From Gerald Hewes <hewes@openmarket.com>.
+ # How about differentiating between stratus architectures? -djm
+@@ -1165,11 +1247,11 @@
+ exit ;;
+ R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*)
+ if [ -d /usr/nec ]; then
+- echo mips-nec-sysv${UNAME_RELEASE}
++ echo mips-nec-sysv${UNAME_RELEASE}
+ else
+- echo mips-unknown-sysv${UNAME_RELEASE}
++ echo mips-unknown-sysv${UNAME_RELEASE}
+ fi
+- exit ;;
++ exit ;;
+ BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only.
+ echo powerpc-be-beos
+ exit ;;
+@@ -1182,6 +1264,9 @@
+ BePC:Haiku:*:*) # Haiku running on Intel PC compatible.
+ echo i586-pc-haiku
+ exit ;;
++ x86_64:Haiku:*:*)
++ echo x86_64-unknown-haiku
++ exit ;;
+ SX-4:SUPER-UX:*:*)
+ echo sx4-nec-superux${UNAME_RELEASE}
+ exit ;;
+@@ -1200,6 +1285,9 @@
+ SX-8R:SUPER-UX:*:*)
+ echo sx8r-nec-superux${UNAME_RELEASE}
+ exit ;;
++ SX-ACE:SUPER-UX:*:*)
++ echo sxace-nec-superux${UNAME_RELEASE}
++ exit ;;
+ Power*:Rhapsody:*:*)
+ echo powerpc-apple-rhapsody${UNAME_RELEASE}
+ exit ;;
+@@ -1208,24 +1296,36 @@
+ exit ;;
+ *:Darwin:*:*)
+ UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown
+- case $UNAME_PROCESSOR in
+- i386)
+- eval $set_cc_for_build
+- if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then
+- if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
+- (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \
+- grep IS_64BIT_ARCH >/dev/null
+- then
+- UNAME_PROCESSOR="x86_64"
+- fi
+- fi ;;
+- unknown) UNAME_PROCESSOR=powerpc ;;
+- esac
++ eval $set_cc_for_build
++ if test "$UNAME_PROCESSOR" = unknown ; then
++ UNAME_PROCESSOR=powerpc
++ fi
++ if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then
++ if [ "$CC_FOR_BUILD" != no_compiler_found ]; then
++ if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \
++ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \
++ grep IS_64BIT_ARCH >/dev/null
++ then
++ case $UNAME_PROCESSOR in
++ i386) UNAME_PROCESSOR=x86_64 ;;
++ powerpc) UNAME_PROCESSOR=powerpc64 ;;
++ esac
++ fi
++ fi
++ elif test "$UNAME_PROCESSOR" = i386 ; then
++ # Avoid executing cc on OS X 10.9, as it ships with a stub
++ # that puts up a graphical alert prompting to install
++ # developer tools. Any system running Mac OS X 10.7 or
++ # later (Darwin 11 and later) is required to have a 64-bit
++ # processor. This is not true of the ARM version of Darwin
++ # that Apple uses in portable devices.
++ UNAME_PROCESSOR=x86_64
++ fi
+ echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE}
+ exit ;;
+ *:procnto*:*:* | *:QNX:[0123456789]*:*)
+ UNAME_PROCESSOR=`uname -p`
+- if test "$UNAME_PROCESSOR" = "x86"; then
++ if test "$UNAME_PROCESSOR" = x86; then
+ UNAME_PROCESSOR=i386
+ UNAME_MACHINE=pc
+ fi
+@@ -1237,7 +1337,7 @@
+ NEO-?:NONSTOP_KERNEL:*:*)
+ echo neo-tandem-nsk${UNAME_RELEASE}
+ exit ;;
+- NSE-?:NONSTOP_KERNEL:*:*)
++ NSE-*:NONSTOP_KERNEL:*:*)
+ echo nse-tandem-nsk${UNAME_RELEASE}
+ exit ;;
+ NSR-?:NONSTOP_KERNEL:*:*)
+@@ -1256,7 +1356,7 @@
+ # "uname -m" is not consistent, so use $cputype instead. 386
+ # is converted to i386 for consistency with other x86
+ # operating systems.
+- if test "$cputype" = "386"; then
++ if test "$cputype" = 386; then
+ UNAME_MACHINE=i386
+ else
+ UNAME_MACHINE="$cputype"
+@@ -1282,13 +1382,13 @@
+ echo pdp10-unknown-its
+ exit ;;
+ SEI:*:*:SEIUX)
+- echo mips-sei-seiux${UNAME_RELEASE}
++ echo mips-sei-seiux${UNAME_RELEASE}
+ exit ;;
+ *:DragonFly:*:*)
+ echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`
+ exit ;;
+ *:*VMS:*:*)
+- UNAME_MACHINE=`(uname -p) 2>/dev/null`
++ UNAME_MACHINE=`(uname -p) 2>/dev/null`
+ case "${UNAME_MACHINE}" in
+ A*) echo alpha-dec-vms ; exit ;;
+ I*) echo ia64-dec-vms ; exit ;;
+@@ -1298,7 +1398,7 @@
+ echo i386-pc-xenix
+ exit ;;
+ i*86:skyos:*:*)
+- echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//'
++ echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE} | sed -e 's/ .*$//'`
+ exit ;;
+ i*86:rdos:*:*)
+ echo ${UNAME_MACHINE}-pc-rdos
+@@ -1306,174 +1406,28 @@
+ i*86:AROS:*:*)
+ echo ${UNAME_MACHINE}-pc-aros
+ exit ;;
+-esac
+-
+-#echo '(No uname command or uname output not recognized.)' 1>&2
+-#echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2
+-
+-eval $set_cc_for_build
+-cat >$dummy.c <<EOF
+-#ifdef _SEQUENT_
+-# include <sys/types.h>
+-# include <sys/utsname.h>
+-#endif
+-main ()
+-{
+-#if defined (sony)
+-#if defined (MIPSEB)
+- /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed,
+- I don't know.... */
+- printf ("mips-sony-bsd\n"); exit (0);
+-#else
+-#include <sys/param.h>
+- printf ("m68k-sony-newsos%s\n",
+-#ifdef NEWSOS4
+- "4"
+-#else
+- ""
+-#endif
+- ); exit (0);
+-#endif
+-#endif
+-
+-#if defined (__arm) && defined (__acorn) && defined (__unix)
+- printf ("arm-acorn-riscix\n"); exit (0);
+-#endif
+-
+-#if defined (hp300) && !defined (hpux)
+- printf ("m68k-hp-bsd\n"); exit (0);
+-#endif
+-
+-#if defined (NeXT)
+-#if !defined (__ARCHITECTURE__)
+-#define __ARCHITECTURE__ "m68k"
+-#endif
+- int version;
+- version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`;
+- if (version < 4)
+- printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version);
+- else
+- printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version);
+- exit (0);
+-#endif
+-
+-#if defined (MULTIMAX) || defined (n16)
+-#if defined (UMAXV)
+- printf ("ns32k-encore-sysv\n"); exit (0);
+-#else
+-#if defined (CMU)
+- printf ("ns32k-encore-mach\n"); exit (0);
+-#else
+- printf ("ns32k-encore-bsd\n"); exit (0);
+-#endif
+-#endif
+-#endif
+-
+-#if defined (__386BSD__)
+- printf ("i386-pc-bsd\n"); exit (0);
+-#endif
+-
+-#if defined (sequent)
+-#if defined (i386)
+- printf ("i386-sequent-dynix\n"); exit (0);
+-#endif
+-#if defined (ns32000)
+- printf ("ns32k-sequent-dynix\n"); exit (0);
+-#endif
+-#endif
+-
+-#if defined (_SEQUENT_)
+- struct utsname un;
+-
+- uname(&un);
+-
+- if (strncmp(un.version, "V2", 2) == 0) {
+- printf ("i386-sequent-ptx2\n"); exit (0);
+- }
+- if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */
+- printf ("i386-sequent-ptx1\n"); exit (0);
+- }
+- printf ("i386-sequent-ptx\n"); exit (0);
+-
+-#endif
+-
+-#if defined (vax)
+-# if !defined (ultrix)
+-# include <sys/param.h>
+-# if defined (BSD)
+-# if BSD == 43
+- printf ("vax-dec-bsd4.3\n"); exit (0);
+-# else
+-# if BSD == 199006
+- printf ("vax-dec-bsd4.3reno\n"); exit (0);
+-# else
+- printf ("vax-dec-bsd\n"); exit (0);
+-# endif
+-# endif
+-# else
+- printf ("vax-dec-bsd\n"); exit (0);
+-# endif
+-# else
+- printf ("vax-dec-ultrix\n"); exit (0);
+-# endif
+-#endif
+-
+-#if defined (alliant) && defined (i860)
+- printf ("i860-alliant-bsd\n"); exit (0);
+-#endif
+-
+- exit (1);
+-}
+-EOF
+-
+-$CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` &&
+- { echo "$SYSTEM_NAME"; exit; }
+-
+-# Apollos put the system type in the environment.
+-
+-test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; }
+-
+-# Convex versions that predate uname can use getsysinfo(1)
+-
+-if [ -x /usr/convex/getsysinfo ]
+-then
+- case `getsysinfo -f cpu_type` in
+- c1*)
+- echo c1-convex-bsd
+- exit ;;
+- c2*)
+- if getsysinfo -f scalar_acc
+- then echo c32-convex-bsd
+- else echo c2-convex-bsd
+- fi
+- exit ;;
+- c34*)
+- echo c34-convex-bsd
+- exit ;;
+- c38*)
+- echo c38-convex-bsd
++ x86_64:VMkernel:*:*)
++ echo ${UNAME_MACHINE}-unknown-esx
+ exit ;;
+- c4*)
+- echo c4-convex-bsd
++ amd64:Isilon\ OneFS:*:*)
++ echo x86_64-unknown-onefs
+ exit ;;
+- esac
+-fi
++esac
+
+ cat >&2 <<EOF
+ $0: unable to guess system type
+
+-This script, last modified $timestamp, has failed to recognize
+-the operating system you are using. It is advised that you
+-download the most up to date version of the config scripts from
++This script (version $timestamp), has failed to recognize the
++operating system you are using. If your script is old, overwrite
++config.guess and config.sub with the latest versions from:
+
+- http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD
++ http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess
+ and
+- http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD
++ http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub
+
+-If the version you run ($0) is already up to date, please
+-send the following data and any information you think might be
+-pertinent to <config-patches@gnu.org> in order to provide the needed
+-information to handle your system.
++If $0 has already been updated, send the following data and any
++information you think might be pertinent to config-patches@gnu.org to
++provide the necessary information to handle your system.
+
+ config.guess timestamp = $timestamp
+
diff --git a/var/spack/repos/builtin/packages/libsm/package.py b/var/spack/repos/builtin/packages/libsm/package.py
new file mode 100644
index 0000000000..4affd50127
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libsm/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libsm(Package):
+ """libSM - X Session Management Library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libSM"
+ url = "https://www.x.org/archive/individual/lib/libSM-1.2.2.tar.gz"
+
+ version('1.2.2', '18e5084ed9500b1b47719fd1758f0ec8')
+
+ depends_on('libice@1.0.5:')
+
+ depends_on('xproto', type='build')
+ depends_on('xtrans', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libsodium/package.py b/var/spack/repos/builtin/packages/libsodium/package.py
index 831a75e659..805881ce07 100644
--- a/var/spack/repos/builtin/packages/libsodium/package.py
+++ b/var/spack/repos/builtin/packages/libsodium/package.py
@@ -24,18 +24,27 @@
##############################################################################
from spack import *
+
class Libsodium(Package):
"""Sodium is a modern, easy-to-use software library for encryption,
decryption, signatures, password hashing and more."""
homepage = "https://download.libsodium.org/doc/"
- url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.3.tar.gz"
+ url = "https://download.libsodium.org/libsodium/releases/libsodium-1.0.11.tar.gz"
+ version('1.0.11', 'b58928d035064b2a46fb564937b83540')
+ version('1.0.10', 'ea89dcbbda0b2b6ff6a1c476231870dd')
version('1.0.3', 'b3bcc98e34d3250f55ae196822307fab')
version('1.0.2', 'dc40eb23e293448c6fc908757738003f')
version('1.0.1', '9a221b49fba7281ceaaf5e278d0f4430')
version('1.0.0', '3093dabe4e038d09f0d150cef064b2f7')
version('0.7.1', 'c224fe3923d1dcfe418c65c8a7246316')
+ def url_for_version(self, version):
+ url = 'https://download.libsodium.org/libsodium/releases/'
+ if version < Version('1.0.4'):
+ url += 'old/'
+ return url + 'libsodium-{0}.tar.gz'.format(version)
+
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
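
The url_for_version hook added above redirects releases older than 1.0.4 to the old/ subdirectory of the download site. A self-contained sketch of that selection logic follows; a naive tuple comparison stands in for Spack's Version class, and the helper names are illustrative:

    # Stand-alone sketch of the version-dependent URL selection added above.
    # A plain tuple comparison substitutes for spack.version.Version.
    BASE = 'https://download.libsodium.org/libsodium/releases/'


    def parse(version):
        return tuple(int(part) for part in version.split('.'))


    def url_for_version(version):
        url = BASE
        if parse(version) < parse('1.0.4'):
            url += 'old/'  # older releases are served from old/, as the hook above assumes
        return url + 'libsodium-{0}.tar.gz'.format(version)


    if __name__ == '__main__':
        print(url_for_version('1.0.3'))   # ends in .../old/libsodium-1.0.3.tar.gz
        print(url_for_version('1.0.11'))  # ends in .../libsodium-1.0.11.tar.gz
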
diff --git a/var/spack/repos/builtin/packages/libspatialindex/package.py b/var/spack/repos/builtin/packages/libspatialindex/package.py
new file mode 100644
index 0000000000..5dd839a7c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libspatialindex/package.py
@@ -0,0 +1,32 @@
+##############################################################################
+# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libspatialindex(CMakePackage):
+ homepage = "http://libspatialindex.github.io"
+ url = "https://github.com/libspatialindex/libspatialindex/tarball/1.8.5"
+
+ version('1.8.5', 'a95d8159714dbda9a274792cd273d298')
diff --git a/var/spack/repos/builtin/packages/libsplash/package.py b/var/spack/repos/builtin/packages/libsplash/package.py
new file mode 100644
index 0000000000..c87dae19be
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libsplash/package.py
@@ -0,0 +1,64 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libsplash(Package):
+ """libSplash aims at developing a HDF5-based I/O library for HPC
+ simulations. It is created as an easy-to-use frontend for the standard HDF5
+ library with support for MPI processes in a cluster environment. While the
+ standard HDF5 library provides detailed low-level control, libSplash
+ simplifies tasks commonly found in large-scale HPC simulations, such as
+ iterative computations and MPI distributed processes.
+ """
+
+ homepage = "https://github.com/ComputationalRadiationPhysics/libSplash"
+ url = "https://github.com/ComputationalRadiationPhysics/libSplash/archive/v1.4.0.tar.gz"
+
+ version('dev', branch='dev',
+ git='https://github.com/ComputationalRadiationPhysics/libSplash.git')
+ version('master', branch='master',
+ git='https://github.com/ComputationalRadiationPhysics/libSplash.git')
+ version('1.6.0', 'c05bce95abfe1ae4cd9d9817acf58d94')
+ version('1.5.0', 'c1efec4c20334242c8a3b6bfdc0207e3')
+ version('1.4.0', '2de37bcef6fafa1960391bf44b1b50e0')
+ version('1.3.1', '524580ba088d97253d03b4611772f37c')
+ version('1.2.4', '3fccb314293d22966beb7afd83b746d0')
+
+ variant('mpi', default=True,
+ description='Enable parallel I/O (one-file aggregation) support')
+
+ depends_on('cmake', type='build')
+ depends_on('hdf5@1.8.6:')
+ depends_on('hdf5+mpi', when='+mpi')
+ depends_on('mpi', when='+mpi')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '..', *std_cmake_args)
+
+ make()
+ make('install')
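
libSplash builds out of source: install() creates a spack-build subdirectory and runs CMake from there. A rough, Spack-free sketch of the same pattern, with subprocess standing in for Spack's cmake() and make() wrappers (directory and argument names are placeholders):

    # Rough stand-alone equivalent of the out-of-source CMake build in install()
    # above; subprocess replaces Spack's cmake()/make() wrappers.
    import os
    import subprocess


    def build_out_of_source(source_dir, prefix, extra_args=()):
        build_dir = os.path.join(source_dir, 'spack-build')
        if not os.path.isdir(build_dir):           # working_dir(..., create=True)
            os.makedirs(build_dir)
        cmake_cmd = ['cmake', '-DCMAKE_INSTALL_PREFIX=' + prefix]
        cmake_cmd += list(extra_args) + ['..']     # configure the parent source tree
        subprocess.check_call(cmake_cmd, cwd=build_dir)
        subprocess.check_call(['make'], cwd=build_dir)
        subprocess.check_call(['make', 'install'], cwd=build_dir)
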
diff --git a/var/spack/repos/builtin/packages/libtermkey/package.py b/var/spack/repos/builtin/packages/libtermkey/package.py
index c7db959a40..64688505c4 100644
--- a/var/spack/repos/builtin/packages/libtermkey/package.py
+++ b/var/spack/repos/builtin/packages/libtermkey/package.py
@@ -24,17 +24,17 @@
##############################################################################
from spack import *
+
class Libtermkey(Package):
"""Easy keyboard entry processing for terminal programs"""
homepage = "http://www.leonerd.org.uk/code/libtermkey/"
url = "http://www.leonerd.org.uk/code/libtermkey/libtermkey-0.18.tar.gz"
- version('0.18' , '3be2e3e5a851a49cc5e8567ac108b520')
- version('0.17' , '20edb99e0d95ec1690fe90e6a555ae6d')
- version('0.16' , '7a24b675aaeb142d30db28e7554987d4')
+ version('0.18', '3be2e3e5a851a49cc5e8567ac108b520')
+ version('0.17', '20edb99e0d95ec1690fe90e6a555ae6d')
+ version('0.16', '7a24b675aaeb142d30db28e7554987d4')
version('0.15b', '27689756e6c86c56ae454f2ac259bc3d')
- version('0.14' , 'e08ce30f440f9715c459060e0e048978')
-
+ version('0.14', 'e08ce30f440f9715c459060e0e048978')
def install(self, spec, prefix):
make()
diff --git a/var/spack/repos/builtin/packages/libtiff/package.py b/var/spack/repos/builtin/packages/libtiff/package.py
index 4b03e7997b..70c371b3b8 100644
--- a/var/spack/repos/builtin/packages/libtiff/package.py
+++ b/var/spack/repos/builtin/packages/libtiff/package.py
@@ -24,19 +24,15 @@
##############################################################################
from spack import *
-class Libtiff(Package):
+
+class Libtiff(AutotoolsPackage):
"""libtiff graphics format library"""
- homepage = "http://www.remotesensing.org/libtiff/"
- url = "http://download.osgeo.org/libtiff/tiff-4.0.3.tar.gz"
+ homepage = "http://www.simplesystems.org/libtiff/"
+ url = "ftp://download.osgeo.org/libtiff/tiff-4.0.3.tar.gz"
+ version('4.0.6', 'd1d2e940dea0b5ad435f21f03d96dd72')
version('4.0.3', '051c1068e6a0627f461948c365290410')
depends_on('jpeg')
depends_on('zlib')
depends_on('xz')
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
-
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/libtool/package.py b/var/spack/repos/builtin/packages/libtool/package.py
index 90e7b3b643..cd12503681 100644
--- a/var/spack/repos/builtin/packages/libtool/package.py
+++ b/var/spack/repos/builtin/packages/libtool/package.py
@@ -24,18 +24,24 @@
##############################################################################
from spack import *
-class Libtool(Package):
- """libtool -- library building part of autotools"""
- homepage = "https://www.gnu.org/software/libtool/"
- url = "http://ftpmirror.gnu.org/libtool/libtool-2.4.2.tar.gz"
- version('2.4.6' , 'addf44b646ddb4e3919805aa88fa7c5e')
- version('2.4.2' , 'd2f3b7d4627e69e13514a40e72a24d50')
+class Libtool(AutotoolsPackage):
+ """libtool -- library building part of autotools."""
- depends_on('m4')
+ homepage = 'https://www.gnu.org/software/libtool/'
+ url = 'http://ftpmirror.gnu.org/libtool/libtool-2.4.2.tar.gz'
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ version('2.4.6', 'addf44b646ddb4e3919805aa88fa7c5e')
+ version('2.4.2', 'd2f3b7d4627e69e13514a40e72a24d50')
- make()
- make("install")
+ depends_on('m4@1.4.6:', type='build')
+
+ def _make_executable(self, name):
+ return Executable(join_path(self.prefix.bin, name))
+
+ def setup_dependent_package(self, module, dependent_spec):
+ # Automake is very likely to be a build dependency,
+ # so we add the tools it provides to the dependent module
+ executables = ['libtoolize', 'libtool']
+ for name in executables:
+ setattr(module, name, self._make_executable(name))
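
The new setup_dependent_package() exposes the installed libtool and libtoolize binaries as callables on the dependent package's module. A minimal stand-alone sketch of that idea; the Executable class and paths below are placeholders, not Spack's real objects:

    # Illustrative sketch of the setup_dependent_package() idea above: expose the
    # tools a package installs as callables on another module.
    import os
    import subprocess
    import types


    class Executable(object):
        """Tiny stand-in for Spack's Executable wrapper."""

        def __init__(self, path):
            self.path = path

        def __call__(self, *args):
            subprocess.check_call([self.path] + list(args))


    def attach_tools(module, bindir, names=('libtool', 'libtoolize')):
        for name in names:
            setattr(module, name, Executable(os.path.join(bindir, name)))


    if __name__ == '__main__':
        fake_module = types.ModuleType('dependent_build_env')
        attach_tools(fake_module, '/usr/bin')      # placeholder bin directory
        print(fake_module.libtool.path)            # /usr/bin/libtool
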
diff --git a/var/spack/repos/builtin/packages/libunistring/package.py b/var/spack/repos/builtin/packages/libunistring/package.py
new file mode 100644
index 0000000000..5b8837e72b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libunistring/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libunistring(Package):
+ """This library provides functions for manipulating Unicode strings
+ and for manipulating C strings according to the Unicode standard."""
+
+ homepage = "https://www.gnu.org/software/libunistring/"
+ url = "http://ftp.gnu.org/gnu/libunistring/libunistring-0.9.6.tar.xz"
+
+ version('0.9.6', 'cb09c398020c27edac10ca590e9e9ef3')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ # make('check') # test-verify fails for me, contacted developers
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libunwind/package.py b/var/spack/repos/builtin/packages/libunwind/package.py
index 980b765c02..63ab4aec59 100644
--- a/var/spack/repos/builtin/packages/libunwind/package.py
+++ b/var/spack/repos/builtin/packages/libunwind/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libunwind(Package):
"""A portable and efficient C programming interface (API) to determine
the call-chain of a program."""
diff --git a/var/spack/repos/builtin/packages/libuuid/package.py b/var/spack/repos/builtin/packages/libuuid/package.py
index 0dd32ec77d..b8f6b1cc3a 100644
--- a/var/spack/repos/builtin/packages/libuuid/package.py
+++ b/var/spack/repos/builtin/packages/libuuid/package.py
@@ -24,9 +24,10 @@
##############################################################################
from spack import *
+
class Libuuid(Package):
"""Portable uuid C library"""
- # FIXME: add a proper url for your package's homepage here.
+
homepage = "http://sourceforge.net/projects/libuuid/"
url = "http://downloads.sourceforge.net/project/libuuid/libuuid-1.0.3.tar.gz?r=http%3A%2F%2Fsourceforge.net%2Fprojects%2Flibuuid%2F&ts=1433881396&use_mirror=iweb"
@@ -35,6 +36,5 @@ class Libuuid(Package):
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
- # FIXME: Add logic to build and install here
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/libuv/package.py b/var/spack/repos/builtin/packages/libuv/package.py
index 63565c7aea..dae10809f2 100644
--- a/var/spack/repos/builtin/packages/libuv/package.py
+++ b/var/spack/repos/builtin/packages/libuv/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libuv(Package):
"""Multi-platform library with a focus on asynchronous IO"""
homepage = "http://libuv.org"
@@ -31,9 +32,9 @@ class Libuv(Package):
version('1.9.0', '14737f9c76123a19a290dabb7d1cd04c')
- depends_on('automake')
- depends_on('autoconf')
- depends_on('libtool')
+ depends_on('automake', type='build')
+ depends_on('autoconf', type='build')
+ depends_on('libtool', type='build')
def install(self, spec, prefix):
bash = which("bash")
diff --git a/var/spack/repos/builtin/packages/libvterm/package.py b/var/spack/repos/builtin/packages/libvterm/package.py
index e57af273ad..2e1ef99b98 100644
--- a/var/spack/repos/builtin/packages/libvterm/package.py
+++ b/var/spack/repos/builtin/packages/libvterm/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libvterm(Package):
"""An abstract library implementation of a terminal emulator"""
homepage = "http://www.leonerd.org.uk/code/libvterm/"
diff --git a/var/spack/repos/builtin/packages/libwebsockets/package.py b/var/spack/repos/builtin/packages/libwebsockets/package.py
new file mode 100644
index 0000000000..3ce58a4c36
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libwebsockets/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libwebsockets(CMakePackage):
+ """C library for lightweight websocket clients and servers."""
+
+ homepage = "https://github.com/warmcat/libwebsockets"
+ url = "https://github.com/warmcat/libwebsockets/archive/v2.1.0.tar.gz"
+
+ version('2.1.0', '4df3be57dee43aeebd54a3ed56568f50')
+ version('2.0.3', 'a025156d606d90579e65d53ccd062a94')
+ version('1.7.9', '7b3692ead5ae00fd0e1d56c080170f07')
+
+ depends_on('zlib')
+ depends_on('openssl')
diff --git a/var/spack/repos/builtin/packages/libwindowswm/package.py b/var/spack/repos/builtin/packages/libwindowswm/package.py
new file mode 100644
index 0000000000..3836e0d419
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libwindowswm/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libwindowswm(Package):
+ """WindowsWM - Cygwin/X rootless window management extension.
+
+ WindowsWM is a simple library designed to interface with the
+ Windows-WM extension. This extension allows X window managers to
+ better interact with the Cygwin XWin server when running X11 in a
+ rootless mode."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libWindowsWM"
+ url = "https://www.x.org/archive/individual/lib/libWindowsWM-1.0.1.tar.gz"
+
+ version('1.0.1', 'f260e124706ff6209c566689528667c6')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('windowswmproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libx11/package.py b/var/spack/repos/builtin/packages/libx11/package.py
new file mode 100644
index 0000000000..c5df2e0f83
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libx11/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libx11(Package):
+ """libX11 - Core X11 protocol client library."""
+
+ homepage = "https://www.x.org/"
+ url = "https://www.x.org/archive/individual/lib/libX11-1.6.3.tar.gz"
+
+ version('1.6.3', '7d16653fe7c36209799175bb3dc1ae46')
+
+ depends_on('libxcb@1.1.92:')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('xtrans', type='build')
+ depends_on('kbproto', type='build')
+ depends_on('inputproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxau/package.py b/var/spack/repos/builtin/packages/libxau/package.py
new file mode 100644
index 0000000000..eb1f1326c6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxau/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxau(Package):
+ """The libXau package contains a library implementing the X11
+ Authorization Protocol. This is useful for restricting client
+ access to the display."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libXau/"
+ url = "https://www.x.org/archive/individual/lib/libXau-1.0.8.tar.gz"
+
+ version('1.0.8', 'a85cd601d82bc79c0daa280917572e20')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxaw/package.py b/var/spack/repos/builtin/packages/libxaw/package.py
new file mode 100644
index 0000000000..9f92ff57d2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxaw/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxaw(Package):
+ """Xaw is the X Athena Widget Set.
+ Xaw is a widget set based on the X Toolkit Intrinsics (Xt) Library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXaw"
+ url = "https://www.x.org/archive/individual/lib/libXaw-1.0.13.tar.gz"
+
+ version('1.0.13', '6c522476024df5872cddc5f1562fb656')
+
+ depends_on('libx11')
+ depends_on('libxext')
+ depends_on('libxt')
+ depends_on('libxmu')
+ depends_on('libxpm')
+
+ depends_on('xproto', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxaw3d/package.py b/var/spack/repos/builtin/packages/libxaw3d/package.py
new file mode 100644
index 0000000000..498f57cbca
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxaw3d/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxaw3d(Package):
+ """Xaw3d is the X 3D Athena Widget Set.
+ Xaw3d is a widget set based on the X Toolkit Intrinsics (Xt) Library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXaw3d"
+ url = "https://www.x.org/archive/individual/lib/libXaw3d-1.6.2.tar.gz"
+
+ version('1.6.2', 'e51e00b734853e555ae9b367d213de45')
+
+ depends_on('libx11')
+ depends_on('libxt')
+ depends_on('libxmu')
+ depends_on('libxext')
+ depends_on('libxpm')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxc/package.py b/var/spack/repos/builtin/packages/libxc/package.py
index 87437373d6..d773395e6c 100644
--- a/var/spack/repos/builtin/packages/libxc/package.py
+++ b/var/spack/repos/builtin/packages/libxc/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Libxc(Package):
"""Libxc is a library of exchange-correlation functionals for
density-functional theory."""
@@ -31,12 +32,28 @@ class Libxc(Package):
homepage = "http://www.tddft.org/programs/octopus/wiki/index.php/Libxc"
url = "http://www.tddft.org/programs/octopus/down.php?file=libxc/libxc-2.2.2.tar.gz"
+ version('3.0.0', '8227fa3053f8fc215bd9d7b0d36de03c')
version('2.2.2', 'd9f90a0d6e36df6c1312b6422280f2ec')
-
+ version('2.2.1', '38dc3a067524baf4f8521d5bb1cd0b8f')
def install(self, spec, prefix):
- configure('--prefix=%s' % prefix,
+ # Optimizations for the Intel compiler, suggested by CP2K
+ optflags = '-O2'
+ if self.compiler.name == 'intel':
+ optflags += ' -xAVX -axCORE-AVX2 -ipo'
+ if which('xiar'):
+ env['AR'] = 'xiar'
+
+ env['CFLAGS'] = optflags
+ env['FCFLAGS'] = optflags
+
+ configure('--prefix={0}'.format(prefix),
'--enable-shared')
make()
- make("install")
+
+ # libxc provides a testsuite, but many tests fail
+ # http://www.tddft.org/pipermail/libxc/2013-February/000032.html
+ # make('check')
+
+ make('install')
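
The rewritten install() picks optimization flags per compiler and switches to Intel's xiar archiver when -ipo is in use. A hedged sketch of that flag selection, with a plain dict standing in for Spack's env object and a string argument for self.compiler.name:

    # Sketch of the compiler-conditional flag selection used in install() above.
    # The flag strings come from the hunk; the env handling is a plain-dict stand-in.
    def configure_env(compiler_name, have_xiar=False):
        """Return the environment overrides applied before running configure."""
        optflags = '-O2'
        env = {}
        if compiler_name == 'intel':
            # CP2K-suggested optimizations for the Intel toolchain.
            optflags += ' -xAVX -axCORE-AVX2 -ipo'
            if have_xiar:
                env['AR'] = 'xiar'   # Intel's archiver, used for -ipo object files
        env['CFLAGS'] = optflags
        env['FCFLAGS'] = optflags
        return env


    if __name__ == '__main__':
        print(configure_env('gcc'))
        print(configure_env('intel', have_xiar=True))
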
diff --git a/var/spack/repos/builtin/packages/libxcb/package.py b/var/spack/repos/builtin/packages/libxcb/package.py
index 0f39bb0f1d..9fa1c6f97c 100644
--- a/var/spack/repos/builtin/packages/libxcb/package.py
+++ b/var/spack/repos/builtin/packages/libxcb/package.py
@@ -24,30 +24,38 @@
##############################################################################
from spack import *
+
class Libxcb(Package):
"""The X protocol C-language Binding (XCB) is a replacement
for Xlib featuring a small footprint, latency hiding, direct
access to the protocol, improved threading support, and
extensibility."""
- homepage = "http://xcb.freedesktop.org/"
- url = "http://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz"
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/libxcb-1.11.tar.gz"
+ version('1.12', '95eee7c28798e16ba5443f188b27a476')
version('1.11', '1698dd837d7e6e94d029dbe8b3a82deb')
version('1.11.1', '118623c15a96b08622603a71d8789bf3')
- depends_on("python")
- depends_on("xcb-proto")
- depends_on("pkg-config")
- # depends_on('pthread') # Ubuntu: apt-get install libpthread-stubs0-dev
- # depends_on('xau') # Ubuntu: apt-get install libxau-dev
+ depends_on('libpthread-stubs')
+ depends_on('libxau@0.99.2:')
+ depends_on('libxdmcp')
- def patch(self):
- filter_file('typedef struct xcb_auth_info_t {', 'typedef struct {', 'src/xcb.h')
+ depends_on('xcb-proto', type='build')
+ depends_on('python@2:2.8', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+ def patch(self):
+ filter_file(
+ 'typedef struct xcb_auth_info_t {',
+ 'typedef struct {',
+ 'src/xcb.h')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
make()
- make("install")
+ make('check')
+ make('install')
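
The reformatted patch() step still does one thing: strip the struct tag from the xcb_auth_info_t typedef in src/xcb.h before building. A stand-alone sketch of that in-place edit, with a literal string replacement standing in for Spack's filter_file() helper:

    # Stand-alone sketch of the patch() step above: rewrite one line of src/xcb.h
    # in place, the way Spack's filter_file() does. Plain replacement suffices
    # here because the pattern is a literal string.
    def filter_file(old, new, path):
        with open(path) as handle:
            text = handle.read()
        with open(path, 'w') as handle:
            handle.write(text.replace(old, new))


    # Example call, mirroring the hunk above (run inside an unpacked libxcb tree):
    # filter_file('typedef struct xcb_auth_info_t {', 'typedef struct {', 'src/xcb.h')
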
diff --git a/var/spack/repos/builtin/packages/libxcomposite/package.py b/var/spack/repos/builtin/packages/libxcomposite/package.py
new file mode 100644
index 0000000000..48cba00250
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxcomposite/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxcomposite(Package):
+ """libXcomposite - client library for the Composite extension to the
+ X11 protocol."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXcomposite"
+ url = "https://www.x.org/archive/individual/lib/libXcomposite-0.4.4.tar.gz"
+
+ version('0.4.4', 'af860b1554a423735d831e6f29ac1ef5')
+
+ depends_on('libx11')
+ depends_on('libxfixes')
+
+ depends_on('compositeproto@0.4:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxcursor/package.py b/var/spack/repos/builtin/packages/libxcursor/package.py
new file mode 100644
index 0000000000..215452ef52
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxcursor/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxcursor(Package):
+ """libXcursor - X Window System Cursor management library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXcursor"
+ url = "https://www.x.org/archive/individual/lib/libXcursor-1.1.14.tar.gz"
+
+ version('1.1.14', '39c8423de190d64f1c52fbc00022e52c')
+
+ depends_on('libxrender@0.8.2:')
+ depends_on('libxfixes')
+ depends_on('libx11')
+
+ depends_on('fixesproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxdamage/package.py b/var/spack/repos/builtin/packages/libxdamage/package.py
new file mode 100644
index 0000000000..448ac21945
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxdamage/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxdamage(Package):
+ """This package contains the library for the X Damage extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXdamage"
+ url = "https://www.x.org/archive/individual/lib/libXdamage-1.1.4.tar.gz"
+
+ version('1.1.4', '95867778da012623815214769007c0d7')
+
+ depends_on('libxfixes')
+ depends_on('libx11')
+
+ depends_on('damageproto@1.1:', type='build')
+ depends_on('fixesproto', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxdmcp/package.py b/var/spack/repos/builtin/packages/libxdmcp/package.py
new file mode 100644
index 0000000000..c05d4b8771
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxdmcp/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxdmcp(Package):
+ """libXdmcp - X Display Manager Control Protocol library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXdmcp"
+ url = "https://www.x.org/archive/individual/lib/libXdmcp-1.1.2.tar.gz"
+
+ version('1.1.2', 'ab0d6a38f0344a05d698ec7d48cfa5a8')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxevie/package.py b/var/spack/repos/builtin/packages/libxevie/package.py
new file mode 100644
index 0000000000..b9f0e41631
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxevie/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxevie(Package):
+ """Xevie - X Event Interception Extension (XEvIE)."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXevie"
+ url = "https://www.x.org/archive/individual/lib/libXevie-1.0.3.tar.gz"
+
+ version('1.0.3', '100e6485cabfe6e788e09c110ca680d8')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xproto', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('evieext', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxext/package.py b/var/spack/repos/builtin/packages/libxext/package.py
new file mode 100644
index 0000000000..192ab3957a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxext/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxext(Package):
+ """libXext - library for common extensions to the X11 protocol."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXext"
+ url = "https://www.x.org/archive/individual/lib/libXext-1.3.3.tar.gz"
+
+ version('1.3.3', '93f5ec084c998efbfb0befed22f9b57f')
+
+ depends_on('libx11@1.6:')
+
+ depends_on('xproto@7.0.13:', type='build')
+ depends_on('xextproto@7.1.99:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxfixes/package.py b/var/spack/repos/builtin/packages/libxfixes/package.py
new file mode 100644
index 0000000000..6b8b599a85
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxfixes/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxfixes(Package):
+ """This package contains header files and documentation for the XFIXES
+ extension. Library and server implementations are separate."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXfixes"
+ url = "https://www.x.org/archive/individual/lib/libXfixes-5.0.2.tar.gz"
+
+ version('5.0.2', '3636e59f8f5fa2e469d556d49f30e98d')
+
+ depends_on('libx11@1.6:')
+
+ depends_on('xproto', type='build')
+ depends_on('fixesproto@5.0:', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxfont/package.py b/var/spack/repos/builtin/packages/libxfont/package.py
new file mode 100644
index 0000000000..1ebf321c9f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxfont/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxfont(Package):
+ """libXfont provides the core of the legacy X11 font system, handling the
+ index files (fonts.dir, fonts.alias, fonts.scale), the various font file
+ formats, and rasterizing them. It is used by the X servers, the
+ X Font Server (xfs), and some font utilities (bdftopcf for instance),
+ but should not be used by normal X11 clients. X11 clients access fonts
+ via either the new APIs in libXft, or the legacy APIs in libX11."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXfont"
+ url = "https://www.x.org/archive/individual/lib/libXfont-1.5.2.tar.gz"
+
+ version('1.5.2', 'e8c616db0e59df4614980915e79bb05e')
+
+ depends_on('libfontenc')
+ depends_on('freetype')
+
+ depends_on('xtrans', type='build')
+ depends_on('xproto', type='build')
+ depends_on('fontsproto@2.1.3:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxfont2/package.py b/var/spack/repos/builtin/packages/libxfont2/package.py
new file mode 100644
index 0000000000..8611e65ebb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxfont2/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxfont2(Package):
+ """libXfont provides the core of the legacy X11 font system, handling the
+ index files (fonts.dir, fonts.alias, fonts.scale), the various font file
+ formats, and rasterizing them. It is used by the X servers, the
+ X Font Server (xfs), and some font utilities (bdftopcf for instance),
+ but should not be used by normal X11 clients. X11 clients access fonts
+ via either the new APIs in libXft, or the legacy APIs in libX11."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXfont"
+ url = "https://www.x.org/archive/individual/lib/libXfont2-2.0.1.tar.gz"
+
+ version('2.0.1', '6ae5ae1f9fb1213b04f14a802a1d721c')
+
+ depends_on('libfontenc')
+ depends_on('freetype')
+
+ depends_on('xtrans', type='build')
+ depends_on('xproto', type='build')
+ depends_on('fontsproto@2.1.3:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxfontcache/package.py b/var/spack/repos/builtin/packages/libxfontcache/package.py
new file mode 100644
index 0000000000..5421f093ca
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxfontcache/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxfontcache(Package):
+ """Xfontcache - X-TrueType font cache extension client library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXfontcache"
+ url = "https://www.x.org/archive/individual/lib/libXfontcache-1.0.5.tar.gz"
+
+ version('1.0.5', '5030fc9c7f16dbb52f92a8ba2c574f5c')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('fontcacheproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxft/package.py b/var/spack/repos/builtin/packages/libxft/package.py
new file mode 100644
index 0000000000..b1b8f853d0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxft/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxft(Package):
+ """X FreeType library.
+
+ Xft version 2.1 was the first stand alone release of Xft, a library that
+ connects X applications with the FreeType font rasterization library. Xft
+ uses fontconfig to locate fonts so it has no configuration files."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXft"
+ url = "https://www.x.org/archive/individual/lib/libXft-2.3.2.tar.gz"
+
+ version('2.3.2', '3a2c1ce2641817dace55cd2bfe10b0f0')
+
+ depends_on('freetype@2.1.6:')
+ depends_on('fontconfig@2.5.92:')
+ depends_on('libx11')
+ depends_on('libxrender@0.8.2:')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxi/package.py b/var/spack/repos/builtin/packages/libxi/package.py
new file mode 100644
index 0000000000..4e9a273579
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxi/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxi(Package):
+ """libXi - library for the X Input Extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXi"
+ url = "https://www.x.org/archive/individual/lib/libXi-1.7.6.tar.gz"
+
+ version('1.7.6', 'f3828f9d7893068f6f6f10fe15b31afa')
+
+ depends_on('libx11@1.6:')
+ depends_on('libxext@1.0.99.1:')
+ depends_on('libxfixes@5:')
+
+ depends_on('xproto@7.0.13:', type='build')
+ depends_on('xextproto@7.0.3:', type='build')
+ depends_on('inputproto@2.2.99.1:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxinerama/package.py b/var/spack/repos/builtin/packages/libxinerama/package.py
new file mode 100644
index 0000000000..a001c41ca7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxinerama/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxinerama(Package):
+ """libXinerama - API for Xinerama extension to X11 Protocol."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXinerama"
+ url = "https://www.x.org/archive/individual/lib/libXinerama-1.1.3.tar.gz"
+
+ version('1.1.3', '7224a1baa9733a54053550a3fb4be118')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('xineramaproto@1.1.99.1:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxkbfile/package.py b/var/spack/repos/builtin/packages/libxkbfile/package.py
new file mode 100644
index 0000000000..af8029a024
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxkbfile/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxkbfile(Package):
+ """XKB file handling routines."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libxkbfile"
+ url = "https://www.x.org/archive/individual/lib/libxkbfile-1.0.9.tar.gz"
+
+ version('1.0.9', '5aab87eba67f37dd910a19be5c1129ee')
+
+ depends_on('libx11')
+
+ depends_on('kbproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxkbui/package.py b/var/spack/repos/builtin/packages/libxkbui/package.py
new file mode 100644
index 0000000000..d0c132d970
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxkbui/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxkbui(Package):
+ """X.org libxkbui library."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libxkbui/"
+ url = "https://www.x.org/archive/individual/lib/libxkbui-1.0.2.tar.gz"
+
+ version('1.0.2', 'a6210171defde64d9e8bcf6a6f6074b0')
+
+ depends_on('libx11')
+ depends_on('libxt')
+ depends_on('libxkbfile')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxml2/package.py b/var/spack/repos/builtin/packages/libxml2/package.py
index baaa2fc83d..bbb934ab9f 100644
--- a/var/spack/repos/builtin/packages/libxml2/package.py
+++ b/var/spack/repos/builtin/packages/libxml2/package.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+
class Libxml2(Package):
"""Libxml2 is the XML C parser and toolkit developed for the Gnome
@@ -32,23 +32,31 @@ class Libxml2(Package):
homepage = "http://xmlsoft.org"
url = "http://xmlsoft.org/sources/libxml2-2.9.2.tar.gz"
+ version('2.9.4', 'ae249165c173b1ff386ee8ad676815f5')
version('2.9.2', '9e6a9aca9d155737868b3dc5fd82f788')
variant('python', default=False, description='Enable Python support')
- extends('python', when='+python', ignore=r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|(lib/xml2.*$)|(lib/cmake.*$)')
+ extends('python', when='+python',
+ ignore=r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|'
+ '(lib/xml2.*$)|(lib/cmake.*$)')
depends_on('zlib')
depends_on('xz')
+ depends_on('pkg-config@0.9.0:', type='build')
+
def install(self, spec, prefix):
if '+python' in spec:
- site_packages_dir = os.path.join(prefix, 'lib/python%s.%s/site-packages' %(spec['python'].version[:2]))
- python_args = ["--with-python=%s" % spec['python'].prefix, "--with-python-install-dir=%s" % site_packages_dir]
+ python_args = [
+ '--with-python={0}'.format(spec['python'].prefix),
+ '--with-python-install-dir={0}'.format(site_packages_dir)
+ ]
else:
- python_args = ["--without-python"]
+ python_args = ['--without-python']
- configure("--prefix=%s" % prefix,
- *python_args)
+ configure('--prefix={0}'.format(prefix), *python_args)
make()
- make("install")
+ if self.run_tests:
+ make('check')
+ make('install')
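
The two concatenated string literals in the extends() call above form a single ignore regex. A small sketch of how that pattern behaves, assuming for illustration that it is applied to prefix-relative paths with re.match (the example paths are made up, not taken from the package):

    import re

    # Combined value of the two concatenated literals shown in the hunk.
    ignore = (r'(bin.*$)|(include.*$)|(share.*$)|(lib/libxml2.*$)|'
              '(lib/xml2.*$)|(lib/cmake.*$)')

    # Hypothetical prefix-relative paths, for illustration only.
    for path in ('bin/xmllint',
                 'lib/libxml2.so',
                 'lib/python2.7/site-packages/libxml2.py'):
        print(path, bool(re.match(ignore, path)))
    # The first two match and are ignored; the site-packages file does not,
    # so only the Python bindings would be linked into the extension view.
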
diff --git a/var/spack/repos/builtin/packages/libxmu/package.py b/var/spack/repos/builtin/packages/libxmu/package.py
new file mode 100644
index 0000000000..dbba5f168d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxmu/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxmu(Package):
+ """This library contains miscellaneous utilities and is not part of the
+ Xlib standard. It contains routines which only use public interfaces so
+ that it may be layered on top of any proprietary implementation of Xlib
+ or Xt."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXmu"
+ url = "https://www.x.org/archive/individual/lib/libXmu-1.1.2.tar.gz"
+
+ version('1.1.2', 'd5be323b02e6851607205c8e941b4e61')
+
+ depends_on('libxt')
+ depends_on('libxext')
+ depends_on('libx11')
+
+ depends_on('xextproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxp/package.py b/var/spack/repos/builtin/packages/libxp/package.py
new file mode 100644
index 0000000000..10aaccc54f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxp/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxp(Package):
+ """libXp - X Print Client Library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXp"
+ url = "https://www.x.org/archive/individual/lib/libXp-1.0.3.tar.gz"
+
+ version('1.0.3', '1157da663b28e110f440ce64cede6e18')
+
+ depends_on('libx11@1.6:')
+ depends_on('libxext')
+ depends_on('libxau')
+
+ depends_on('xextproto', type='build')
+ depends_on('printproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxpm/package.py b/var/spack/repos/builtin/packages/libxpm/package.py
new file mode 100644
index 0000000000..b726e74b0b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxpm/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxpm(Package):
+ """libXpm - X Pixmap (XPM) image file format library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXpm"
+ url = "https://www.x.org/archive//individual/lib/libXpm-3.5.11.tar.gz"
+
+ version('3.5.11', '7c67c878ee048206b070bc0b24154f04')
+ version('3.5.10', 'a70507638d74541bf30a771f1e5938bb')
+ version('3.5.9', 'd6d4b0f76248a6b346eb42dfcdaa72a6')
+ version('3.5.8', '2d81d6633e67ac5562e2fbee126b2897')
+ version('3.5.7', '7bbc8f112f7143ed6961a58ce4e14558')
+
+ depends_on('libx11')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxpresent/package.py b/var/spack/repos/builtin/packages/libxpresent/package.py
new file mode 100644
index 0000000000..e65d4353a0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxpresent/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxpresent(Package):
+ """This package contains header files and documentation for the Present
+ extension. Library and server implementations are separate."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libXpresent/"
+ url = "https://www.x.org/archive/individual/lib/libXpresent-1.0.0.tar.gz"
+
+ version('1.0.0', '2f543a595c3e6a519e2e38d079002958')
+
+ depends_on('libx11')
+
+ depends_on('xproto', type='build')
+ depends_on('presentproto@1.0:', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxprintapputil/package.py b/var/spack/repos/builtin/packages/libxprintapputil/package.py
new file mode 100644
index 0000000000..fc66b76ac0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxprintapputil/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxprintapputil(Package):
+ """Xprint application utility routines."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libXprintAppUtil/"
+ url = "https://www.x.org/archive/individual/lib/libXprintAppUtil-1.0.1.tar.gz"
+
+ version('1.0.1', '3adb71fa34a2d4e75d8b840310318f76')
+
+ depends_on('libx11')
+ depends_on('libxp')
+ depends_on('libxprintutil')
+ depends_on('libxau')
+
+ depends_on('printproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxprintutil/package.py b/var/spack/repos/builtin/packages/libxprintutil/package.py
new file mode 100644
index 0000000000..8eb768958f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxprintutil/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxprintutil(Package):
+ """Xprint application utility routines."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libXprintUtil/"
+ url = "https://www.x.org/archive/individual/lib/libXprintUtil-1.0.1.tar.gz"
+
+ version('1.0.1', '2f02e812f3e419534ced6fcb5860825f')
+
+ depends_on('libx11')
+ depends_on('libxp')
+ depends_on('libxt')
+ depends_on('libxau')
+
+ depends_on('printproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxrandr/package.py b/var/spack/repos/builtin/packages/libxrandr/package.py
new file mode 100644
index 0000000000..56c36c0c7b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxrandr/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxrandr(Package):
+ """libXrandr - X Resize, Rotate and Reflection extension library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXrandr"
+ url = "https://www.x.org/archive/individual/lib/libXrandr-1.5.0.tar.gz"
+
+ version('1.5.0', 'e2fafff575b94ba0b15983eb4df93656')
+
+ depends_on('libx11@1.6:')
+ depends_on('libxext')
+ depends_on('libxrender')
+
+ depends_on('randrproto@1.5:', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('renderproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxrender/package.py b/var/spack/repos/builtin/packages/libxrender/package.py
new file mode 100644
index 0000000000..c5a6dac1be
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxrender/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxrender(Package):
+ """libXrender - library for the Render Extension to the X11 protocol."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXrender"
+ url = "https://www.x.org/archive/individual/lib/libXrender-0.9.9.tar.gz"
+
+ version('0.9.9', '0c797c4f2a7b782896bc223e6dac4333')
+
+ depends_on('libx11@1.6:')
+
+ depends_on('renderproto@0.9:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxres/package.py b/var/spack/repos/builtin/packages/libxres/package.py
new file mode 100644
index 0000000000..6d0684c4b8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxres/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxres(Package):
+ """libXRes - X-Resource extension client library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXRes"
+ url = "https://www.x.org/archive/individual/lib/libXres-1.0.7.tar.gz"
+
+ version('1.0.7', '7fad9ab34201bb4adffcbf0cd7e87a89')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('resourceproto@1.0:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxscrnsaver/package.py b/var/spack/repos/builtin/packages/libxscrnsaver/package.py
new file mode 100644
index 0000000000..c9ca6ac1c8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxscrnsaver/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxscrnsaver(Package):
+ """XScreenSaver - X11 Screen Saver extension client library"""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXScrnSaver"
+ url = "https://www.x.org/archive/individual/lib/libXScrnSaver-1.2.2.tar.gz"
+
+ version('1.2.2', '79227e7d8c0dad27654c526de3d6fef3')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('scrnsaverproto@1.2:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxshmfence/package.py b/var/spack/repos/builtin/packages/libxshmfence/package.py
index 6d63ea6426..d4f4c85203 100644
--- a/var/spack/repos/builtin/packages/libxshmfence/package.py
+++ b/var/spack/repos/builtin/packages/libxshmfence/package.py
@@ -24,17 +24,26 @@
##############################################################################
from spack import *
+
class Libxshmfence(Package):
- """This is a tiny library that exposes a event API on top of Linux
- futexes."""
+ """libxshmfence - Shared memory 'SyncFence' synchronization primitive.
+
+ This library offers a CPU-based synchronization primitive compatible
+ with the X SyncFence objects that can be shared between processes
+ using file descriptor passing."""
- homepage = "http://keithp.com/blogs/dri3_extension/" # not really...
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libxshmfence/"
url = "http://xorg.freedesktop.org/archive/individual/lib/libxshmfence-1.2.tar.gz"
version('1.2', 'f0b30c0fc568b22ec524859ee28556f1')
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
make()
- make("install")
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxslt/package.py b/var/spack/repos/builtin/packages/libxslt/package.py
index c6c439ad09..9c5a42bcfb 100644
--- a/var/spack/repos/builtin/packages/libxslt/package.py
+++ b/var/spack/repos/builtin/packages/libxslt/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class Libxslt(Package):
+
+class Libxslt(AutotoolsPackage):
"""Libxslt is the XSLT C library developed for the GNOME
project. XSLT itself is an XML language to define
transformations for XML. Libxslt is based on libxml2 the XML C
@@ -36,13 +37,9 @@ class Libxslt(Package):
url = "http://xmlsoft.org/sources/libxslt-1.1.28.tar.gz"
version('1.1.28', '9667bf6f9310b957254fdcf6596600b7')
+ version('1.1.29', 'a129d3c44c022de3b9dcf6d6f288d72e')
depends_on("libxml2")
depends_on("xz")
depends_on("zlib")
depends_on("libgcrypt")
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/libxsmm/package.py b/var/spack/repos/builtin/packages/libxsmm/package.py
new file mode 100644
index 0000000000..a736490600
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxsmm/package.py
@@ -0,0 +1,66 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxsmm(Package):
+ '''LIBXSMM is a library for small dense and small sparse matrix-matrix
+ multiplications targeting Intel Architecture (x86).'''
+
+ homepage = 'https://github.com/hfp/libxsmm'
+ url = 'https://github.com/hfp/libxsmm/archive/1.4.3.tar.gz'
+
+ version('1.4.3', '9839bf0fb8be7badf1e97ce4c817149b')
+ version('1.4.2', 'ea025761437f3b5c936821b9ca21ec31')
+ version('1.4.1', '71648500ea4510529845d329091917df')
+ version('1.4', 'b42f91bf5285e7ad0463446e55ebdc2b')
+
+ def patch(self):
+ kwargs = {'ignore_absent': False, 'backup': False, 'string': True}
+ makefile = FileFilter('Makefile.inc')
+
+ # Spack sets CC, CXX, and FC to point to the compiler wrappers
+ # Don't let Makefile.inc overwrite these
+ makefile.filter('CC = icc', 'CC ?= icc', **kwargs)
+ makefile.filter('CC = gcc', 'CC ?= gcc', **kwargs)
+ makefile.filter('CXX = icpc', 'CXX ?= icpc', **kwargs)
+ makefile.filter('CXX = g++', 'CXX ?= g++', **kwargs)
+ makefile.filter('FC = ifort', 'FC ?= ifort', **kwargs)
+ makefile.filter('FC = gfortran', 'FC ?= gfortran', **kwargs)
+
+ def manual_install(self, prefix):
+ install_tree('include', prefix.include)
+ install_tree('lib', prefix.lib)
+ install_tree('documentation', prefix.share + '/libxsmm/doc')
+
+ def install(self, spec, prefix):
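+ # ROW_MAJOR=0 keeps the default column-major layout; INDICES_M/N/K pre-generate specialized kernels for every size from 1 to 24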
+ make_args = [
+ 'ROW_MAJOR=0',
+ 'INDICES_M={0}'.format(' '.join(str(i) for i in range(1, 25))),
+ 'INDICES_N={0}'.format(' '.join(str(i) for i in range(1, 25))),
+ 'INDICES_K={0}'.format(' '.join(str(i) for i in range(1, 25)))
+ ]
+ make(*make_args)
+ self.manual_install(prefix)
diff --git a/var/spack/repos/builtin/packages/libxstream/package.py b/var/spack/repos/builtin/packages/libxstream/package.py
new file mode 100644
index 0000000000..3201b58620
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxstream/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxstream(Package):
+ '''LIBXSTREAM is a library to work with streams, events, and code regions
+ that are able to run asynchronously while preserving the usual stream
+ conditions.'''
+
+ homepage = 'https://github.com/hfp/libxstream'
+ url = 'https://github.com/hfp/libxstream.git'
+
+ version('0.9.0', git='https://github.com/hfp/libxstream.git')
+
+ def patch(self):
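+ # As in libxsmm, keep Spack's compiler wrappers by making the Makefile's CC/CXX/FC assignments conditional (?=)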
+ kwargs = {'ignore_absent': False, 'backup': True, 'string': True}
+ makefile = FileFilter('Makefile.inc')
+
+ makefile.filter('CC =', 'CC ?=', **kwargs)
+ makefile.filter('CXX =', 'CXX ?=', **kwargs)
+ makefile.filter('FC =', 'FC ?=', **kwargs)
+
+ def install(self, spec, prefix):
+ make()
+ install_tree('lib', prefix.lib)
+ install_tree('include', prefix.include)
+ install_tree('documentation', prefix.share + '/libxstream/doc/')
diff --git a/var/spack/repos/builtin/packages/libxt/package.py b/var/spack/repos/builtin/packages/libxt/package.py
new file mode 100644
index 0000000000..c657c866b4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxt/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxt(Package):
+ """libXt - X Toolkit Intrinsics library."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXt"
+ url = "https://www.x.org/archive/individual/lib/libXt-1.1.5.tar.gz"
+
+ version('1.1.5', '77d317fbc508dd6adefb59d57a663032')
+
+ depends_on('libsm')
+ depends_on('libice')
+ depends_on('libx11')
+
+ depends_on('xproto', type='build')
+ depends_on('kbproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxtrap/package.py b/var/spack/repos/builtin/packages/libxtrap/package.py
new file mode 100644
index 0000000000..4589f98a87
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxtrap/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxtrap(Package):
+ """libXTrap is the Xlib-based client API for the DEC-XTRAP extension.
+
+ XTrap was a proposed standard extension for X11R5 which facilitated the
+ capturing of server protocol and synthesizing core input events.
+
+ Digital participated in the X Consortium's xtest working group which chose
+ to evolve XTrap functionality into the XTEST & RECORD extensions for X11R6.
+
+ As X11R6 was released in 1994, XTrap has now been deprecated for over
+ 15 years, and uses of it should be quite rare."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXTrap"
+ url = "https://www.x.org/archive/individual/lib/libXTrap-1.0.1.tar.gz"
+
+ version('1.0.1', 'fde266b82ee14da3e4f4f81c9584c1ea')
+
+ depends_on('libx11')
+ depends_on('libxt')
+ depends_on('libxext')
+
+ depends_on('trapproto', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxtst/package.py b/var/spack/repos/builtin/packages/libxtst/package.py
new file mode 100644
index 0000000000..0d16643f94
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxtst/package.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxtst(Package):
+ """libXtst provides the Xlib-based client API for the XTEST & RECORD
+ extensions.
+
+ The XTEST extension is a minimal set of client and server extensions
+ required to completely test the X11 server with no user intervention.
+ This extension is not intended to support general journaling and
+ playback of user actions.
+
+ The RECORD extension supports the recording and reporting of all
+ core X protocol and arbitrary X extension protocol."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXtst"
+ url = "https://www.x.org/archive/individual/lib/libXtst-1.2.2.tar.gz"
+
+ version('1.2.2', 'efef3b1e44bd8074a601c0c5ce0788f4')
+
+ depends_on('libx11')
+ depends_on('libxext@1.0.99.4:')
+ depends_on('libxi')
+
+ depends_on('recordproto@1.13.99.1:', type='build')
+ depends_on('xextproto@7.0.99.3:', type='build')
+ depends_on('inputproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxv/package.py b/var/spack/repos/builtin/packages/libxv/package.py
new file mode 100644
index 0000000000..03f10a1842
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxv/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxv(Package):
+ """libXv - library for the X Video (Xv) extension to the
+ X Window System."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXv"
+ url = "https://www.x.org/archive/individual/lib/libXv-1.0.10.tar.gz"
+
+ version('1.0.10', 'e7182673b4bbe3ca00ac932e22edc038')
+
+ depends_on('libx11@1.6:')
+ depends_on('libxext')
+
+ depends_on('xextproto', type='build')
+ depends_on('videoproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxvmc/package.py b/var/spack/repos/builtin/packages/libxvmc/package.py
new file mode 100644
index 0000000000..9d5695c2c1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxvmc/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxvmc(Package):
+ """X.org libXvMC library."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/lib/libXvMC"
+ url = "https://www.x.org/archive/individual/lib/libXvMC-1.0.9.tar.gz"
+
+ version('1.0.9', 'a28c0780373537f4774565309b31a69e')
+
+ depends_on('libx11@1.6:')
+ depends_on('libxext')
+ depends_on('libxv')
+
+ depends_on('xextproto', type='build')
+ depends_on('videoproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxxf86dga/package.py b/var/spack/repos/builtin/packages/libxxf86dga/package.py
new file mode 100644
index 0000000000..292c5d213b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxxf86dga/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxxf86dga(Package):
+ """libXxf86dga - Client library for the XFree86-DGA extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXxf86dga"
+ url = "https://www.x.org/archive/individual/lib/libXxf86dga-1.1.4.tar.gz"
+
+ version('1.1.4', '8ed1c8674e730e8d333dfe4b9f2097d9')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xproto', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('xf86dgaproto@2.0.99.2:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxxf86misc/package.py b/var/spack/repos/builtin/packages/libxxf86misc/package.py
new file mode 100644
index 0000000000..0247f8b57c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxxf86misc/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxxf86misc(Package):
+ """libXxf86misc - Extension library for the XFree86-Misc X extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXxf86misc"
+ url = "https://www.x.org/archive/individual/lib/libXxf86misc-1.0.3.tar.gz"
+
+ version('1.0.3', 'c8d8743e146bcd2aa9856117ac5ef6c0')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('xproto', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('xf86miscproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/libxxf86vm/package.py b/var/spack/repos/builtin/packages/libxxf86vm/package.py
new file mode 100644
index 0000000000..feec5ff3d2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/libxxf86vm/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Libxxf86vm(Package):
+ """libXxf86vm - Extension library for the XFree86-VidMode X extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libXxf86vm"
+ url = "https://www.x.org/archive/individual/lib/libXxf86vm-1.1.4.tar.gz"
+
+ version('1.1.4', '675bd0c521472628d5796602f625ef51')
+
+ depends_on('libx11@1.6:')
+ depends_on('libxext')
+
+ depends_on('xproto', type='build')
+ depends_on('xextproto', type='build')
+ depends_on('xf86vidmodeproto@2.2.99.1:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/likwid/package.py b/var/spack/repos/builtin/packages/likwid/package.py
new file mode 100644
index 0000000000..8d1687a11a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/likwid/package.py
@@ -0,0 +1,69 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Likwid(Package):
+ """Likwid is a simple to install and use toolsuite of command line
+ applications for performance oriented programmers. It works for Intel and
+ AMD processors on the Linux operating system."""
+
+ homepage = "https://github.com/RRZE-HPC/likwid"
+ url = "https://github.com/RRZE-HPC/likwid/archive/4.1.2.tar.gz"
+
+ version('4.1.2', 'a857ce5bd23e31d96e2963fe81cb38f0')
+
+ # NOTE: There is no way to use an externally provided hwloc with Likwid.
+ # The reason is that the internal hwloc is patched to contain extra
+ # functionality and functions are prefixed with "likwid_".
+
+ # TODO: how to specify those?
+ # depends_on('lua')
+
+ # TODO: check
+ # depends_on('gnuplot', type='run')
+
+ supported_compilers = {'clang': 'CLANG', 'gcc': 'GCC', 'intel': 'ICC'}
+
+ def install(self, spec, prefix):
+ if self.compiler.name not in self.supported_compilers:
+ raise RuntimeError('{0} is not a supported compiler '
+ 'to compile Likwid'.format(self.compiler.name))
+
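+ # Rewrite config.mk: pick the matching toolchain, point PREFIX at the Spack prefix, and chown installed files to the building user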
+ filter_file('^COMPILER .*',
+ 'COMPILER = ' +
+ self.supported_compilers[self.compiler.name],
+ 'config.mk')
+ filter_file('^PREFIX .*',
+ 'PREFIX = ' +
+ prefix,
+ 'config.mk')
+
+ filter_file('^INSTALL_CHOWN.*',
+ 'INSTALL_CHOWN = -o $(USER)',
+ 'config.mk')
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/listres/package.py b/var/spack/repos/builtin/packages/listres/package.py
new file mode 100644
index 0000000000..c6b3d149f6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/listres/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Listres(Package):
+ """The listres program generates a list of X resources for a widget
+ in an X client written using a toolkit based on libXt."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/listres"
+ url = "https://www.x.org/archive/individual/app/listres-1.0.3.tar.gz"
+
+ version('1.0.3', '77cafc32e8e02cca2d4453e73e0c0e7d')
+
+ depends_on('libxaw')
+ depends_on('libxt')
+ depends_on('libxmu')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/llvm-lld/package.py b/var/spack/repos/builtin/packages/llvm-lld/package.py
index 073c2d1b9e..6a167cca33 100644
--- a/var/spack/repos/builtin/packages/llvm-lld/package.py
+++ b/var/spack/repos/builtin/packages/llvm-lld/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class LlvmLld(Package):
"""lld - The LLVM Linker
lld is a new set of modular code for creating linker tools."""
@@ -34,8 +35,10 @@ class LlvmLld(Package):
version('3.4', '3b6a17e58c8416c869c14dd37682f78e')
+ depends_on('cmake', type='build')
+
def install(self, spec, prefix):
- env['CXXFLAGS'] = self.compier.cxx11_flag
+ env['CXXFLAGS'] = self.compiler.cxx11_flag
with working_dir('spack-build', create=True):
cmake('..',
diff --git a/var/spack/repos/builtin/packages/llvm/package.py b/var/spack/repos/builtin/packages/llvm/package.py
index c090c131c6..06572ea312 100644
--- a/var/spack/repos/builtin/packages/llvm/package.py
+++ b/var/spack/repos/builtin/packages/llvm/package.py
@@ -22,34 +22,58 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import os
+
from spack import *
-import os, shutil
class Llvm(Package):
"""The LLVM Project is a collection of modular and reusable compiler and
- toolchain technologies. Despite its name, LLVM has little to do with
- traditional virtual machines, though it does provide helpful libraries
- that can be used to build them. The name "LLVM" itself is not an acronym;
- it is the full name of the project.
+ toolchain technologies. Despite its name, LLVM has little to do
+ with traditional virtual machines, though it does provide helpful
+ libraries that can be used to build them. The name "LLVM" itself
+ is not an acronym; it is the full name of the project.
"""
+
homepage = 'http://llvm.org/'
url = 'http://llvm.org/releases/3.7.1/llvm-3.7.1.src.tar.xz'
- version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005', url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz') # currently required by mesa package
+ family = 'compiler' # Used by lmod
- variant('debug', default=False, description="Build a debug version of LLVM, this increases binary size by an order of magnitude, make sure you have 20-30gb of space available to build this")
- variant('clang', default=True, description="Build the LLVM C/C++/Objective-C compiler frontend")
- variant('lldb', default=True, description="Build the LLVM debugger")
- variant('internal_unwind', default=True, description="Build the libcxxabi libunwind")
- variant('polly', default=True, description="Build the LLVM polyhedral optimization plugin, only builds for 3.7.0+")
- variant('libcxx', default=True, description="Build the LLVM C++ standard library")
- variant('compiler-rt', default=True, description="Build the LLVM compiler runtime, including sanitizers")
- variant('gold', default=True, description="Add support for LTO with the gold linker plugin")
+ # currently required by mesa package
+ version('3.0', 'a8e5f5f1c1adebae7b4a654c376a6005',
+ url='http://llvm.org/releases/3.0/llvm-3.0.tar.gz')
+ variant('debug', default=False,
+ description="Build a debug version of LLVM, this increases "
+ "binary size by an order of magnitude, make sure you have "
+ "20-30gb of space available to build this")
+ variant('clang', default=True,
+ description="Build the LLVM C/C++/Objective-C compiler frontend")
+ variant('lldb', default=True, description="Build the LLVM debugger")
+ variant('internal_unwind', default=True,
+ description="Build the libcxxabi libunwind")
+ variant('polly', default=True,
+ description="Build the LLVM polyhedral optimization plugin, "
+ "only builds for 3.7.0+")
+ variant('libcxx', default=True,
+ description="Build the LLVM C++ standard library")
+ variant('compiler-rt', default=True,
+ description="Build LLVM compiler runtime, including sanitizers")
+ variant('gold', default=True,
+ description="Add support for LTO with the gold linker plugin")
+ variant('shared_libs', default=False,
+ description="Build all components as shared libraries, faster, "
+ "less memory to build, less stable")
+ variant('link_dylib', default=False,
+ description="Build and link the libLLVM shared library rather "
+ "than static")
+ variant('all_targets', default=True,
+ description="Build all supported targets, default targets "
+ "<current arch>,NVPTX,AMDGPU,CppBackend")
# Build dependency
- depends_on('cmake @2.8.12.2:')
+ depends_on('cmake@2.8.12.2:', type='build')
# Universal dependency
depends_on('python@2.7:2.8') # Seems not to support python 3.X.Y
@@ -66,147 +90,177 @@ class Llvm(Package):
depends_on('gmp', when='@:3.6.999 +polly')
depends_on('isl', when='@:3.6.999 +polly')
- base_url = 'http://llvm.org/releases/%%(version)s/%(pkg)s-%%(version)s.src.tar.xz'
- llvm_url = base_url % { 'pkg' : 'llvm'}
+ base_url = 'http://llvm.org/releases/%%(version)s/%(pkg)s-%%(version)s.src.tar.xz'
+ llvm_url = base_url % {'pkg': 'llvm'}
resources = {
- 'compiler-rt' : {
- 'url' : base_url % { 'pkg' : 'compiler-rt'},
- 'destination' : 'projects',
- 'placement' : 'compiler-rt',
- },
- 'openmp' : {
- 'url' : base_url % { 'pkg' : 'openmp'},
- 'destination' : 'projects',
- 'placement' : 'openmp',
- },
- 'libcxx' : {
- 'url' : base_url % { 'pkg' : 'libcxx'},
- 'destination' : 'projects',
- 'placement' : 'libcxx',
- },
- 'libcxxabi' : {
- 'url' : base_url % { 'pkg' : 'libcxxabi'},
- 'destination' : 'projects',
- 'placement' : 'libcxxabi',
- },
- 'clang' : {
- 'url' : base_url % { 'pkg' : 'cfe'},
- 'destination' : 'tools',
- 'placement' : 'clang',
- },
- 'clang-tools-extra' : {
- 'url' : base_url % { 'pkg' : 'clang-tools-extra'},
- 'destination' : 'tools/clang/tools',
- 'placement' : 'extra',
- },
- 'lldb' : {
- 'url' : base_url % { 'pkg' : 'lldb'},
- 'destination' : 'tools',
- 'placement' : 'lldb',
- },
- 'polly' : {
- 'url' : base_url % { 'pkg' : 'polly'},
- 'destination' : 'tools',
- 'placement' : 'polly',
- },
- 'llvm-libunwind' : {
- 'url' : base_url % { 'pkg' : 'libunwind'},
- 'destination' : 'projects',
- 'placement' : 'libunwind',
- },
- }
+ 'compiler-rt': {
+ 'url': base_url % {'pkg': 'compiler-rt'},
+ 'destination': 'projects',
+ 'placement': 'compiler-rt',
+ },
+ 'openmp': {
+ 'url': base_url % {'pkg': 'openmp'},
+ 'destination': 'projects',
+ 'placement': 'openmp',
+ },
+ 'libcxx': {
+ 'url': base_url % {'pkg': 'libcxx'},
+ 'destination': 'projects',
+ 'placement': 'libcxx',
+ },
+ 'libcxxabi': {
+ 'url': base_url % {'pkg': 'libcxxabi'},
+ 'destination': 'projects',
+ 'placement': 'libcxxabi',
+ },
+ 'cfe': {
+ 'url': base_url % {'pkg': 'cfe'},
+ 'destination': 'tools',
+ 'placement': 'clang',
+ },
+ 'clang-tools-extra': {
+ 'url': base_url % {'pkg': 'clang-tools-extra'},
+ 'destination': 'tools/clang/tools',
+ 'placement': 'extra',
+ },
+ 'lldb': {
+ 'url': base_url % {'pkg': 'lldb'},
+ 'destination': 'tools',
+ 'placement': 'lldb',
+ },
+ 'polly': {
+ 'url': base_url % {'pkg': 'polly'},
+ 'destination': 'tools',
+ 'placement': 'polly',
+ },
+ 'llvm-libunwind': {
+ 'url': base_url % {'pkg': 'libunwind'},
+ 'destination': 'projects',
+ 'placement': 'libunwind',
+ },
+ }
releases = [
- {
- 'version' : 'trunk',
- 'repo' : 'http://llvm.org/svn/llvm-project/llvm/trunk',
- 'resources' : {
- 'compiler-rt' : 'http://llvm.org/svn/llvm-project/compiler-rt/trunk',
- 'openmp' : 'http://llvm.org/svn/llvm-project/openmp/trunk',
- 'polly' : 'http://llvm.org/svn/llvm-project/polly/trunk',
- 'libcxx' : 'http://llvm.org/svn/llvm-project/libcxx/trunk',
- 'libcxxabi' : 'http://llvm.org/svn/llvm-project/libcxxabi/trunk',
- 'clang' : 'http://llvm.org/svn/llvm-project/cfe/trunk',
- 'clang-tools-extra' : 'http://llvm.org/svn/llvm-project/clang-tools-extra/trunk',
- 'lldb' : 'http://llvm.org/svn/llvm-project/lldb/trunk',
- 'llvm-libunwind' : 'http://llvm.org/svn/llvm-project/libunwind/trunk',
- }
- },
- {
- 'version' : '3.8.0',
- 'md5':'07a7a74f3c6bd65de4702bf941b511a0',
- 'resources' : {
- 'compiler-rt' : 'd6fcbe14352ffb708e4d1ac2e48bb025',
- 'openmp' : '8fd7cc35d48051613cf1e750e9f22e40',
- 'polly' : '1b3b20f52d34a4024e21a4ea7112caa7',
- 'libcxx' : 'd6e0bdbbee39f7907ad74fd56d03b88a',
- 'libcxxabi' : 'bbe6b4d72c7c5978550d370af529bcf7',
- 'clang' : 'cc99e7019bb74e6459e80863606250c5',
- 'clang-tools-extra' : 'c2344f50e0eea0b402f0092a80ddc036',
- 'lldb' : 'a5da35ed9cc8c8817ee854e3dbfba00e',
- 'llvm-libunwind' : '162ade468607f153cca12be90b5194fa',
- }
- },
- {
- 'version' : '3.7.1',
- 'md5':'bf8b3a2c79e61212c5409041dfdbd319',
- 'resources' : {
- 'compiler-rt' : '1c6975daf30bb3b0473b53c3a1a6ff01',
- 'openmp' : 'b4ad08cda4e5c22e42b66062b140438e',
- 'polly' : '3a2a7367002740881637f4d47bca4dc3',
- 'libcxx' : 'f9c43fa552a10e14ff53b94d04bea140',
- 'libcxxabi' : '52d925afac9f97e9dcac90745255c169',
- 'clang' : '0acd026b5529164197563d135a8fd83e',
- 'clang-tools-extra' : '5d49ff745037f061a7c86aeb6a24c3d2',
- 'lldb' : 'a106d8a0d21fc84d76953822fbaf3398',
- 'llvm-libunwind' : '814bd52c9247c5d04629658fbcb3ab8c',
- }
- },
- {
- 'version' : '3.7.0',
- 'md5':'b98b9495e5655a672d6cb83e1a180f8e',
- 'resources' : {
- 'compiler-rt' : '383c10affd513026f08936b5525523f5',
- 'openmp' : 'f482c86fdead50ba246a1a2b0bbf206f',
- 'polly' : '32f93ffc9cc7e042df22089761558f8b',
- 'libcxx' : '46aa5175cbe1ad42d6e9c995968e56dd',
- 'libcxxabi' : '5aa769e2fca79fa5335cfae8f6258772',
- 'clang' : '8f9d27335e7331cf0a4711e952f21f01',
- 'clang-tools-extra' : 'd5a87dacb65d981a427a536f6964642e',
- 'lldb' : 'e5931740400d1dc3e7db4c7ba2ceff68',
- 'llvm-libunwind' : '9a75392eb7eb8ed5c0840007e212baf5',
- }
- },
- {
- 'version' : '3.6.2',
- 'md5':'0c1ee3597d75280dee603bae9cbf5cc2',
- 'resources' : {
- 'compiler-rt' : 'e3bc4eb7ba8c39a6fe90d6c988927f3c',
- 'openmp' : '65dd5863b9b270960a96817e9152b123',
- 'libcxx' : '22214c90697636ef960a49aef7c1823a',
- 'libcxxabi' : '17518e361e4e228f193dd91e8ef54ba2',
- 'clang' : 'ff862793682f714bb7862325b9c06e20',
- 'clang-tools-extra' : '3ebc1dc41659fcec3db1b47d81575e06',
- 'lldb' : '51e5eb552f777b950bb0ff326e60d5f0',
- }
- },
- {
- 'version' : '3.5.1',
- 'md5':'2d3d8004f38852aa679e5945b8ce0b14',
- 'resources' : {
- 'compiler-rt' : 'd626cfb8a9712cb92b820798ab5bc1f8',
- 'openmp' : '121ddb10167d7fc38b1f7e4b029cf059',
- 'libcxx' : '406f09b1dab529f3f7879f4d548329d2',
- 'libcxxabi' : 'b22c707e8d474a99865ad3c521c3d464',
- 'clang' : '93f9532f8f7e6f1d8e5c1116907051cb',
- 'clang-tools-extra' : 'f13f31ed3038acadc6fa63fef812a246',
- 'lldb' : 'cc5ea8a414c62c33e760517f8929a204',
- }
- },
- ]
+ {
+ 'version': 'trunk',
+ 'repo': 'http://llvm.org/svn/llvm-project/llvm/trunk',
+ 'resources': {
+ 'compiler-rt': 'http://llvm.org/svn/llvm-project/compiler-rt/trunk',
+ 'openmp': 'http://llvm.org/svn/llvm-project/openmp/trunk',
+ 'polly': 'http://llvm.org/svn/llvm-project/polly/trunk',
+ 'libcxx': 'http://llvm.org/svn/llvm-project/libcxx/trunk',
+ 'libcxxabi': 'http://llvm.org/svn/llvm-project/libcxxabi/trunk',
+ 'cfe': 'http://llvm.org/svn/llvm-project/cfe/trunk',
+ 'clang-tools-extra': 'http://llvm.org/svn/llvm-project/clang-tools-extra/trunk',
+ 'lldb': 'http://llvm.org/svn/llvm-project/lldb/trunk',
+ 'llvm-libunwind': 'http://llvm.org/svn/llvm-project/libunwind/trunk',
+ }
+ },
+ {
+ 'version': '3.9.0',
+ 'md5': 'f2093e98060532449eb7d2fcfd0bc6c6',
+ 'resources': {
+ 'compiler-rt': 'b7ea34c9d744da16ffc0217b6990d095',
+ 'openmp': '5390164f2374e1444e82393541ecf6c7',
+ 'polly': '1cf328cbae25267749b68cfa6f113674',
+ 'libcxx': '0a11efefd864ce6f321194e441f7e569',
+ 'libcxxabi': 'd02642308e22e614af6b061b9b4fedfa',
+ 'cfe': '29e1d86bee422ab5345f5e9fb808d2dc',
+ 'clang-tools-extra': 'f4f663068c77fc742113211841e94d5e',
+ 'lldb': '968d053c3c3d7297983589164c6999e9',
+ 'llvm-libunwind': '3e5c87c723a456be599727a444b1c166',
+ }
+ },
+ {
+ 'version': '3.8.1',
+ 'md5': '538467e6028bbc9259b1e6e015d25845',
+ 'resources': {
+ 'compiler-rt': 'f140db073d2453f854fbe01cc46f3110',
+ 'openmp': '078b8d4c51ad437a4f8b5989f5ec4156',
+ 'polly': '8a40e697a4ba1c8b640b85d074bd6e25',
+ 'libcxx': '1bc60150302ff76a0d79d6f9db22332e',
+ 'libcxxabi': '3c63b03ba2f30a01279ca63384a67773',
+ 'cfe': '4ff2f8844a786edb0220f490f7896080',
+ 'clang-tools-extra': '6e49f285d0b366cc3cab782d8c92d382',
+ 'lldb': '9e4787b71be8e432fffd31e13ac87623',
+ 'llvm-libunwind': 'd66e2387e1d37a8a0c8fe6a0063a3bab',
+ }
+ },
+ {
+ 'version': '3.8.0',
+ 'md5': '07a7a74f3c6bd65de4702bf941b511a0',
+ 'resources': {
+ 'compiler-rt': 'd6fcbe14352ffb708e4d1ac2e48bb025',
+ 'openmp': '8fd7cc35d48051613cf1e750e9f22e40',
+ 'polly': '1b3b20f52d34a4024e21a4ea7112caa7',
+ 'libcxx': 'd6e0bdbbee39f7907ad74fd56d03b88a',
+ 'libcxxabi': 'bbe6b4d72c7c5978550d370af529bcf7',
+ 'cfe': 'cc99e7019bb74e6459e80863606250c5',
+ 'clang-tools-extra': 'c2344f50e0eea0b402f0092a80ddc036',
+ 'lldb': 'a5da35ed9cc8c8817ee854e3dbfba00e',
+ 'llvm-libunwind': '162ade468607f153cca12be90b5194fa',
+ }
+ },
+ {
+ 'version': '3.7.1',
+ 'md5': 'bf8b3a2c79e61212c5409041dfdbd319',
+ 'resources': {
+ 'compiler-rt': '1c6975daf30bb3b0473b53c3a1a6ff01',
+ 'openmp': 'b4ad08cda4e5c22e42b66062b140438e',
+ 'polly': '3a2a7367002740881637f4d47bca4dc3',
+ 'libcxx': 'f9c43fa552a10e14ff53b94d04bea140',
+ 'libcxxabi': '52d925afac9f97e9dcac90745255c169',
+ 'cfe': '0acd026b5529164197563d135a8fd83e',
+ 'clang-tools-extra': '5d49ff745037f061a7c86aeb6a24c3d2',
+ 'lldb': 'a106d8a0d21fc84d76953822fbaf3398',
+ 'llvm-libunwind': '814bd52c9247c5d04629658fbcb3ab8c',
+ }
+ },
+ {
+ 'version': '3.7.0',
+ 'md5': 'b98b9495e5655a672d6cb83e1a180f8e',
+ 'resources': {
+ 'compiler-rt': '383c10affd513026f08936b5525523f5',
+ 'openmp': 'f482c86fdead50ba246a1a2b0bbf206f',
+ 'polly': '32f93ffc9cc7e042df22089761558f8b',
+ 'libcxx': '46aa5175cbe1ad42d6e9c995968e56dd',
+ 'libcxxabi': '5aa769e2fca79fa5335cfae8f6258772',
+ 'cfe': '8f9d27335e7331cf0a4711e952f21f01',
+ 'clang-tools-extra': 'd5a87dacb65d981a427a536f6964642e',
+ 'lldb': 'e5931740400d1dc3e7db4c7ba2ceff68',
+ 'llvm-libunwind': '9a75392eb7eb8ed5c0840007e212baf5',
+ }
+ },
+ {
+ 'version': '3.6.2',
+ 'md5': '0c1ee3597d75280dee603bae9cbf5cc2',
+ 'resources': {
+ 'compiler-rt': 'e3bc4eb7ba8c39a6fe90d6c988927f3c',
+ 'openmp': '65dd5863b9b270960a96817e9152b123',
+ 'libcxx': '22214c90697636ef960a49aef7c1823a',
+ 'libcxxabi': '17518e361e4e228f193dd91e8ef54ba2',
+ 'cfe': 'ff862793682f714bb7862325b9c06e20',
+ 'clang-tools-extra': '3ebc1dc41659fcec3db1b47d81575e06',
+ 'lldb': '51e5eb552f777b950bb0ff326e60d5f0',
+ }
+ },
+ {
+ 'version': '3.5.1',
+ 'md5': '2d3d8004f38852aa679e5945b8ce0b14',
+ 'resources': {
+ 'compiler-rt': 'd626cfb8a9712cb92b820798ab5bc1f8',
+ 'openmp': '121ddb10167d7fc38b1f7e4b029cf059',
+ 'libcxx': '406f09b1dab529f3f7879f4d548329d2',
+ 'libcxxabi': 'b22c707e8d474a99865ad3c521c3d464',
+ 'cfe': '93f9532f8f7e6f1d8e5c1116907051cb',
+ 'clang-tools-extra': 'f13f31ed3038acadc6fa63fef812a246',
+ 'lldb': 'cc5ea8a414c62c33e760517f8929a204',
+ }
+ },
+ ]
for release in releases:
- if release['version'] == 'trunk' :
+ if release['version'] == 'trunk':
version(release['version'], svn=release['repo'])
for name, repo in release['resources'].items():
@@ -228,18 +282,19 @@ class Llvm(Package):
def install(self, spec, prefix):
env['CXXFLAGS'] = self.compiler.cxx11_flag
- cmake_args = [ arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg ]
+ cmake_args = [arg for arg in std_cmake_args if 'BUILD_TYPE' not in arg]
build_type = 'RelWithDebInfo' if '+debug' in spec else 'Release'
cmake_args.extend([
- '..',
- '-DCMAKE_BUILD_TYPE=' + build_type,
- '-DLLVM_REQUIRES_RTTI:BOOL=ON',
- '-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp',
- '-DPYTHON_EXECUTABLE:PATH=%s/bin/python' % spec['python'].prefix ])
+ '..',
+ '-DCMAKE_BUILD_TYPE=' + build_type,
+ '-DLLVM_REQUIRES_RTTI:BOOL=ON',
+ '-DCLANG_DEFAULT_OPENMP_RUNTIME:STRING=libomp',
+ '-DPYTHON_EXECUTABLE:PATH=%s/bin/python' % spec['python'].prefix])
if '+gold' in spec:
- cmake_args.append('-DLLVM_BINUTILS_INCDIR=' + os.path.join( spec['binutils'].prefix, 'include'))
+ cmake_args.append('-DLLVM_BINUTILS_INCDIR=' +
+ os.path.join(spec['binutils'].prefix, 'include'))
if '+polly' in spec:
cmake_args.append('-DLINK_POLLY_INTO_TOOLS:Bool=ON')
else:
@@ -257,17 +312,40 @@ class Llvm(Package):
if '+compiler-rt' not in spec:
cmake_args.append('-DLLVM_EXTERNAL_COMPILER_RT_BUILD:Bool=OFF')
- if '+clang' not in spec:
+ if '+shared_libs' in spec:
+ cmake_args.append('-DBUILD_SHARED_LIBS:Bool=ON')
+
+ if '+link_dylib' in spec:
+ cmake_args.append('-DLLVM_LINK_LLVM_DYLIB:Bool=ON')
+
+ if '+all_targets' not in spec: # all is default on cmake
+ targets = ['CppBackend', 'NVPTX', 'AMDGPU']
+ if 'x86' in spec.architecture.target.lower():
+ targets.append('X86')
+ elif 'arm' in spec.architecture.target.lower():
+ targets.append('ARM')
+ elif 'aarch64' in spec.architecture.target.lower():
+ targets.append('AArch64')
+ elif 'sparc' in spec.architecture.target.lower():
+ targets.append('sparc')
+ elif ('ppc' in spec.architecture.target.lower() or
+ 'power' in spec.architecture.target.lower()):
+ targets.append('PowerPC')
+
+ cmake_args.append(
+ '-DLLVM_TARGETS_TO_BUILD:Bool=' + ';'.join(targets))
+
+ if '+clang' not in spec:
if '+clang_extra' in spec:
- raise SpackException('The clang_extra variant requires the clang variant to be selected')
+ raise SpackException(
+ 'The clang_extra variant requires the `+clang` variant.')
if '+lldb' in spec:
- raise SpackException('The lldb variant requires the clang variant to be selected')
+ raise SpackException(
+ 'The lldb variant requires the `+clang` variant')
with working_dir('spack-build', create=True):
cmake(*cmake_args)
make()
make("install")
- query_path = os.path.join('bin', 'clang-query')
- # Manually install clang-query, because llvm doesn't...
- if os.path.exists(query_path):
- shutil.copy(query_path, os.path.join(prefix, 'bin'))
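+ # Copy the whole build-tree bin/ into the prefix so tools skipped by 'make install' (e.g. clang-query) are kept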
+ cp = which('cp')
+ cp('-a', 'bin/', prefix)
diff --git a/var/spack/repos/builtin/packages/lmdb/package.py b/var/spack/repos/builtin/packages/lmdb/package.py
index 79c020b2df..8c6c23d8dc 100644
--- a/var/spack/repos/builtin/packages/lmdb/package.py
+++ b/var/spack/repos/builtin/packages/lmdb/package.py
@@ -25,12 +25,12 @@
import os
from spack import *
+
class Lmdb(Package):
"""Read-only mirror of official repo on openldap.org. Issues and
pull requests here are ignored. Use OpenLDAP ITS for issues.
http://www.openldap.org/software/repo.html"""
-
homepage = "http://www.openldap.org/software/repo.html"
url = "https://github.com/LMDB/lmdb/archive/LMDB_0.9.16.tar.gz"
diff --git a/var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch b/var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch
new file mode 100644
index 0000000000..70f0d47925
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lmod/fix_tclsh_paths.patch
@@ -0,0 +1,10 @@
+--- a/Makefile.in 2016-07-21 13:03:27.861000000 -0400
++++ b/Makefile.in 2016-07-21 13:03:58.416000000 -0400
+@@ -197,6 +197,7 @@
+ -e 's|@colorize@|$(COLORIZE)|g' \
+ -e 's|@duplicate_paths@|$(DUPLICATE_PATHS)|g' \
+ -e 's|@allow_tcl_mfiles@|$(ALLOW_TCL_MFILES)|g' \
++ -e 's|@path_to_tclsh@|$(PATH_TO_TCLSH)|g' \
+ -e 's|@mpath_avail@|$(MPATH_AVAIL)|g' \
+ -e 's|@short_time@|$(SHORT_TIME)|g' \
+ -e 's|@cacheDirs@|$(SPIDER_CACHE_DIRS)|g' \
diff --git a/var/spack/repos/builtin/packages/lmod/package.py b/var/spack/repos/builtin/packages/lmod/package.py
index 0a8b9b4577..9ac270bab5 100644
--- a/var/spack/repos/builtin/packages/lmod/package.py
+++ b/var/spack/repos/builtin/packages/lmod/package.py
@@ -23,28 +23,50 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+from glob import glob
+
class Lmod(Package):
- """
- Lmod is a Lua based module system that easily handles the MODULEPATH
+ """Lmod is a Lua based module system that easily handles the MODULEPATH
Hierarchical problem. Environment Modules provide a convenient way to
dynamically change the users' environment through modulefiles. This
includes easily adding or removing directories to the PATH environment
variable. Modulefiles for Library packages provide environment variables
that specify where the library and header files can be found.
"""
- homepage = "https://www.tacc.utexas.edu/research-development/tacc-projects/lmod"
- url = "http://sourceforge.net/projects/lmod/files/Lmod-6.0.1.tar.bz2/download"
+ homepage = 'https://www.tacc.utexas.edu/research-development/tacc-projects/lmod'
+ url = 'https://github.com/TACC/Lmod/archive/6.4.1.tar.gz'
+
+ version('6.4.5', '14f6c58dbc0a5a75574d795eac2c1e3c')
+ version('6.4.1', '7978ba777c8aa41a4d8c05fec5f780f4')
+ version('6.3.7', '0fa4d5a24c41cae03776f781aa2dedc1')
version('6.0.1', '91abf52fe5033bd419ffe2842ebe7af9')
- depends_on("lua@5.2:")
+ depends_on('lua@5.2:')
+ depends_on('lua-luaposix', type=('build', 'run'))
+ depends_on('lua-luafilesystem', type=('build', 'run'))
+ depends_on('tcl', type=('build', 'run'))
+
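+ # Build serially; parallel make is disabled for this package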
+ parallel = False
+
+ def setup_environment(self, spack_env, run_env):
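+ # Expose the staged Lmod sources on LUA_PATH so lua can find them during the build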
+ stage_lua_path = join_path(
+ self.stage.path, 'Lmod-{version}', 'src', '?.lua')
+ spack_env.append_path('LUA_PATH', stage_lua_path.format(
+ version=self.version), separator=';')
+
+ patch('fix_tclsh_paths.patch', when='@:6.4.3')
+
+ def patch(self):
+ """The tcl scripts should use the tclsh that was discovered
+ by the configure script. Touch up their #! lines so that the
+ sed in the Makefile's install step has something to work on.
+ Requires the change in the associated patch file."""
+ if self.spec.version <= Version('6.4.3'):
+ for tclscript in glob('src/*.tcl'):
+ filter_file(r'^#!.*tclsh', '#!@path_to_tclsh@', tclscript)
def install(self, spec, prefix):
- # Add our lua to PATH
- os.environ['PATH'] = spec['lua'].prefix.bin + ';' + os.environ['PATH']
-
configure('--prefix=%s' % prefix)
- make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/lndir/package.py b/var/spack/repos/builtin/packages/lndir/package.py
new file mode 100644
index 0000000000..a7ce892502
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lndir/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Lndir(Package):
+ """lndir - create a shadow directory of symbolic links to another
+ directory tree."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/util/lndir"
+ url = "https://www.x.org/archive/individual/util/lndir-1.0.3.tar.gz"
+
+ version('1.0.3', '7173b2e4832658d319c2980a7c834205')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/lrslib/Makefile.spack.patch b/var/spack/repos/builtin/packages/lrslib/Makefile.spack.patch
new file mode 100644
index 0000000000..d4d5e66528
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lrslib/Makefile.spack.patch
@@ -0,0 +1,60 @@
+--- old/Makefile.spack
++++ new/Makefile.spack
+@@ -0,0 +1,57 @@
++# Set PREFIX to the install location for both building and installing
++# Set BOOST_PREFIX to the location where BOOST is installed
++# Set GMP_PREFIX to the location where GMP is installed
++
++all: liblrsgmp.la \
++ 2nash fourier lrs lrs1 lrsnash redund redund1 setnash setnash2
++
++liblrsgmp.la: lrslib-GMP.lo lrsgmp-GMP.lo
++ libtool --mode=link --tag=CC cc -g -O3 \
++ -rpath $(PREFIX)/lib -o $@ $^ \
++ -L$(GMP_PREFIX)/lib -lgmp
++
++lrs1: lrs-LONG.lo lrslib-LONG.lo lrslong-LONG.lo
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++redund1: redund-LONG.lo lrslib-LONG.lo lrslong-LONG.lo
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++lrs: lrs-GMP.lo lrslib-GMP.lo lrsmp-GMP.lo liblrsgmp.la
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++redund: redund-GMP.lo lrslib-GMP.lo lrsmp-GMP.lo liblrsgmp.la
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++fourier: fourier-GMP.lo lrslib-GMP.lo lrsgmp-GMP.lo liblrsgmp.la
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++lrsnash: lrsnash-GMP.lo lrsnashlib-GMP.lo lrslib-GMP.lo lrsmp-GMP.lo \
++ liblrsgmp.la
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++2nash: 2nash.lo
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++setnash: setupnash.lo lrslib.lo lrsmp.lo
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++setnash2: setupnash2.lo lrslib.lo lrsmp.lo
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++
++%.lo: %.c
++ libtool --mode=compile --tag=CC cc -g -O3 -o $@ -c $*.c
++%-GMP.lo: %.c
++ libtool --mode=compile --tag=CC cc -g -O3 -o $@ -DGMP -c $*.c
++%-LONG.lo: %.c
++ libtool --mode=compile --tag=CC cc -g -O3 -o $@ -DLRSLONG -c $*.c
++
++install:
++ mkdir -p $(PREFIX)/bin
++ mkdir -p $(PREFIX)/include
++ mkdir -p $(PREFIX)/lib
++ libtool --mode=install cp 2nash $(PREFIX)/bin/2nash
++ libtool --mode=install cp fourier $(PREFIX)/bin/fourier
++ libtool --mode=install cp lrs $(PREFIX)/bin/lrs
++ libtool --mode=install cp lrs1 $(PREFIX)/bin/lrs1
++ libtool --mode=install cp lrsnash $(PREFIX)/bin/lrsnash
++ libtool --mode=install cp redund $(PREFIX)/bin/redund
++ libtool --mode=install cp redund1 $(PREFIX)/bin/redund1
++ libtool --mode=install cp setnash $(PREFIX)/bin/setnash
++ libtool --mode=install cp setnash2 $(PREFIX)/bin/setnash2
++ libtool --mode=install cp lrsgmp.h $(PREFIX)/include/lrsgmp.h
++ libtool --mode=install cp lrslib.h $(PREFIX)/include/lrslib.h
++ libtool --mode=install cp liblrsgmp.la $(PREFIX)/lib/liblrsgmp.la
++
++.PHONY: all install
diff --git a/var/spack/repos/builtin/packages/lrslib/package.py b/var/spack/repos/builtin/packages/lrslib/package.py
new file mode 100644
index 0000000000..3825867bb6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lrslib/package.py
@@ -0,0 +1,61 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Lrslib(Package):
+ """lrslib Ver 6.2 is a self-contained ANSI C implementation of the
+ reverse search algorithm for vertex enumeration/convex hull
+ problems and comes with a choice of three arithmetic packages"""
+ homepage = "http://cgm.cs.mcgill.ca/~avis/C/lrs.html"
+ url = "http://cgm.cs.mcgill.ca/~avis/C/lrslib/archive/lrslib-062.tar.gz"
+
+ version('6.2', 'be5da7b3b90cc2be628dcade90c5d1b9')
+ version('6.1', '0b3687c8693cd7d1f234a3f65e147551')
+ version('6.0', 'd600a2e62969ad03f7ab2f85f1b3709c')
+ version('5.1', 'cca323eee8bf76f598a13d7bf67cc13d')
+ version('4.3', '86dd9a45d20a3a0069f77e61be5b46ad')
+
+ # Note: lrslib can also be built with Boost, and probably without gmp
+
+ # depends_on("boost")
+ depends_on("gmp")
+ depends_on("libtool", type="build")
+
+ patch("Makefile.spack.patch")
+
+ def url_for_version(self, version):
+ url = "http://cgm.cs.mcgill.ca/~avis/C/lrslib/archive/lrslib-0{0}.tar.gz"
+ return url.format(version.joined)
+
+ def install(self, spec, prefix):
+ # The Makefile isn't portable; use our own instead
+ makeargs = ["-f", "Makefile.spack",
+ "PREFIX=%s" % prefix,
+ # "BOOST_PREFIX=%s" % spec["boost"].prefix,
+ "GMP_PREFIX=%s" % spec["gmp"].prefix]
+ make(*makeargs)
+ make("install", *makeargs)
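The url_for_version hook above relies on Spack's Version formatting: version.joined drops the separators, so '6.2' becomes '62', and the leading '0' in the template yields 'lrslib-062.tar.gz'. A standalone sketch of the same mapping in plain Python (not part of the diff; the helper name is made up):

# Mirrors the url_for_version logic above with plain strings:
# Version.joined concatenates the version components without separators.
def lrslib_url(version_string):
    joined = version_string.replace('.', '')   # '6.2' -> '62'
    return ("http://cgm.cs.mcgill.ca/~avis/C/lrslib/archive/"
            "lrslib-0{0}.tar.gz".format(joined))

assert lrslib_url('6.2').endswith('lrslib-062.tar.gz')
assert lrslib_url('4.3').endswith('lrslib-043.tar.gz')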
diff --git a/var/spack/repos/builtin/packages/lrzip/package.py b/var/spack/repos/builtin/packages/lrzip/package.py
new file mode 100644
index 0000000000..42542acfdb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lrzip/package.py
@@ -0,0 +1,61 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Lrzip(Package):
+ """A compression utility that excels at compressing large files
+ (usually > 10-50 MB). Larger files and/or more free RAM means that the
+ utility will be able to more effectively compress your files (ie: faster /
+ smaller size), especially if the filesize(s) exceed 100 MB. You can either
+ choose to optimise for speed (fast compression / decompression) or size,
+ but not both."""
+
+ homepage = 'http://lrzip.kolivas.org'
+ url = 'https://github.com/ckolivas/lrzip/archive/v0.630.tar.gz'
+
+ version('master', git='https://github.com/ckolivas/lrzip.git')
+ version('0.630', '3ca7f1d1365aa105089d1fbfc6b0924a')
+ version('0.621', '1f07227b39ae81a98934411e8611e341')
+ version('0.616', 'd40bdb046d0807ef602e36b1e9782cc0')
+ version('0.615', 'f1c01e7f3de07f54d916b61c989dfaf2')
+
+ # depends_on('coreutils')
+ depends_on('lzo')
+ depends_on('zlib')
+ depends_on('bzip2')
+
+ def install(self, spec, prefix):
+ set_executable('./autogen.sh')
+ autogen = Executable('./autogen.sh')
+
+ configure_args = [
+ '--prefix={0}'.format(prefix),
+ '--disable-dependency-tracking'
+ ]
+ autogen(*configure_args)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/LuaJIT/package.py b/var/spack/repos/builtin/packages/lua-jit/package.py
index db6f7d3cad..5f7de8ff06 100644
--- a/var/spack/repos/builtin/packages/LuaJIT/package.py
+++ b/var/spack/repos/builtin/packages/lua-jit/package.py
@@ -25,7 +25,8 @@
import os
from spack import *
-class Luajit(Package):
+
+class LuaJit(Package):
"""Flast flexible JITed lua"""
homepage = "http://www.luajit.org"
url = "http://luajit.org/download/LuaJIT-2.0.4.tar.gz"
diff --git a/var/spack/repos/builtin/packages/lua-luafilesystem/package.py b/var/spack/repos/builtin/packages/lua-luafilesystem/package.py
new file mode 100644
index 0000000000..7a5c90f36f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lua-luafilesystem/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class LuaLuafilesystem(Package):
+ """LuaFileSystem is a Lua library developed to complement the set of
+ functions related to file systems offered by the standard Lua distribution.
+
+ LuaFileSystem offers a portable way to access the underlying directory
+ structure and file attributes.
+
+ LuaFileSystem is free software and uses the same license as Lua 5.1
+ """
+
+ homepage = 'http://keplerproject.github.io/luafilesystem'
+ url = 'https://github.com/keplerproject/luafilesystem/archive/v1_6_3.tar.gz'
+
+ version('1_6_3', 'bed11874cfded8b4beed7dd054127b24')
+
+ depends_on('git@2.9.3:', type='build')
+ extends('lua')
+
+ def install(self, spec, prefix):
+ rockspec_fmt = join_path(self.stage.path,
+ 'luafilesystem-{version.underscored}',
+ 'rockspecs',
+ 'luafilesystem-{version.dotted}-1.rockspec')
+ luarocks('--tree=' + prefix, 'install',
+ rockspec_fmt.format(version=self.spec.version))
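The rockspec path above leans on two more Version formatters: version.underscored ('1_6_3') and version.dotted ('1.6.3'). A small standalone illustration (sample values taken from the version listed above; variable names are placeholders, and the real path is additionally joined under self.stage.path):

# How the format string above expands for luafilesystem 1_6_3.
rockspec_fmt = ('luafilesystem-{version_underscored}/rockspecs/'
                'luafilesystem-{version_dotted}-1.rockspec')
print(rockspec_fmt.format(version_underscored='1_6_3', version_dotted='1.6.3'))
# -> luafilesystem-1_6_3/rockspecs/luafilesystem-1.6.3-1.rockspec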
diff --git a/var/spack/repos/builtin/packages/lua-luaposix/package.py b/var/spack/repos/builtin/packages/lua-luaposix/package.py
index 9e96548f08..3803a938c8 100644
--- a/var/spack/repos/builtin/packages/lua-luaposix/package.py
+++ b/var/spack/repos/builtin/packages/lua-luaposix/package.py
@@ -1,3 +1,27 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
from spack import *
import glob
diff --git a/var/spack/repos/builtin/packages/lua/package.py b/var/spack/repos/builtin/packages/lua/package.py
index e621967586..357e2cc03a 100644
--- a/var/spack/repos/builtin/packages/lua/package.py
+++ b/var/spack/repos/builtin/packages/lua/package.py
@@ -57,21 +57,23 @@ class Lua(Package):
placement='luarocks')
def install(self, spec, prefix):
- if spec.satisfies("=darwin-i686") or spec.satisfies("=darwin-x86_64"):
+ if spec.satisfies("platform=darwin"):
target = 'macosx'
else:
target = 'linux'
make('INSTALL_TOP=%s' % prefix,
- 'MYLDFLAGS=-L%s -L%s ' % (
+ 'MYLDFLAGS=-L%s -L%s' % (
spec['readline'].prefix.lib,
spec['ncurses'].prefix.lib),
'MYLIBS=-lncurses',
+ 'CC=%s -std=gnu99' % spack_cc,
target)
make('INSTALL_TOP=%s' % prefix,
- 'MYLDFLAGS=-L%s -L%s ' % (
+ 'MYLDFLAGS=-L%s -L%s' % (
spec['readline'].prefix.lib,
spec['ncurses'].prefix.lib),
'MYLIBS=-lncurses',
+ 'CC=%s -std=gnu99' % spack_cc,
'install')
with working_dir(os.path.join('luarocks', 'luarocks')):
@@ -86,7 +88,8 @@ class Lua(Package):
def setup_dependent_environment(self, spack_env, run_env, extension_spec):
lua_paths = []
- for d in extension_spec.traverse():
+ for d in extension_spec.traverse(
+ deptypes=('build', 'run'), deptype_query='run'):
if d.package.extends(self.spec):
lua_paths.append(os.path.join(d.prefix, self.lua_lib_dir))
lua_paths.append(os.path.join(d.prefix, self.lua_share_dir))
@@ -105,6 +108,9 @@ class Lua(Package):
spack_env.set('LUA_PATH', ';'.join(lua_patterns), separator=';')
spack_env.set('LUA_CPATH', ';'.join(lua_cpatterns), separator=';')
+ # Add LUA to PATH for dependent packages
+ spack_env.prepend_path('PATH', self.prefix.bin)
+
# For run time environment set only the path for extension_spec and
# prepend it to LUAPATH
if extension_spec.package.extends(self.spec):
@@ -137,11 +143,11 @@ class Lua(Package):
@property
def lua_lib_dir(self):
- return os.path.join('lib', 'lua', '%d.%d' % self.version[:2])
+ return os.path.join('lib', 'lua', self.version.up_to(2))
@property
def lua_share_dir(self):
- return os.path.join('share', 'lua', '%d.%d' % self.version[:2])
+ return os.path.join('share', 'lua', self.version.up_to(2))
def setup_dependent_package(self, module, ext_spec):
"""
@@ -153,5 +159,5 @@ class Lua(Package):
"""
# Lua extension builds can have lua and luarocks executable functions
module.lua = Executable(join_path(self.spec.prefix.bin, 'lua'))
- module.luarocks = Executable(join_path(self.spec.prefix.bin,
- 'luarocks'))
+ module.luarocks = Executable(
+ join_path(self.spec.prefix.bin, 'luarocks'))
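The lua changes above exercise Spack's extendee hooks: setup_dependent_environment injects environment modifications into the builds (and module files) of packages that extend lua, and setup_dependent_package exposes lua/luarocks callables to them. A minimal, hypothetical extendee sketching just the environment hook (package name, URL and checksum are placeholders):

from spack import *


class Exampletool(Package):
    """Hypothetical extendee sketching setup_dependent_environment."""

    homepage = "http://example.com"
    url = "http://example.com/exampletool-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder md5

    def setup_dependent_environment(self, spack_env, run_env, extension_spec):
        # spack_env: build-time environment of the dependent package.
        spack_env.prepend_path('PATH', self.prefix.bin)
        # run_env: recorded in the dependent's run-time environment.
        run_env.prepend_path('PATH', self.prefix.bin)

    def install(self, spec, prefix):
        make('install', 'PREFIX={0}'.format(prefix))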
diff --git a/var/spack/repos/builtin/packages/luit/package.py b/var/spack/repos/builtin/packages/luit/package.py
new file mode 100644
index 0000000000..54fd740bdc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/luit/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Luit(Package):
+ """Luit is a filter that can be run between an arbitrary application and
+ a UTF-8 terminal emulator such as xterm. It will convert application
+ output from the locale's encoding into UTF-8, and convert terminal
+ input from UTF-8 into the locale's encoding."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/luit"
+ url = "https://www.x.org/archive/individual/app/luit-1.1.1.tar.gz"
+
+ version('1.1.1', '04128a52f68c05129f709196819ddad3')
+
+ depends_on('libfontenc')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix),
+ # see http://www.linuxquestions.org/questions/linux-from-scratch-13/can't-compile-luit-xorg-applications-4175476308/ # noqa
+ 'CFLAGS=-U_XOPEN_SOURCE -D_XOPEN_SOURCE=600')
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/lulesh/package.py b/var/spack/repos/builtin/packages/lulesh/package.py
new file mode 100644
index 0000000000..e880d4fa14
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lulesh/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Lulesh(Package):
+ """Livermore Unstructured Lagrangian Explicit Shock Hydrodynamics (LULESH)
+ """
+
+ homepage = "https://codesign.llnl.gov/lulesh.php"
+ url = "https://codesign.llnl.gov/lulesh/lulesh2.0.3.tgz"
+
+ version("2.0.3", "336644a8750f71c7c6b9d2960976e7aa")
+
+ patch("remove_defaults.patch")
+
+ variant('mpip', default=False)
+
+ depends_on("mpi", type="build")
+ depends_on("mpip", when="+mpip")
+
+    def install(self, spec, prefix):
+        if '+mpip' in spec:
+            os.environ["LDFLAGS"] = " -lmpiP -ldwarf -lelf"
+            # libunwind is only needed together with mpiP; keeping the check
+            # here also guarantees LDFLAGS is defined before it is appended to
+            if os.uname()[4] == "x86_64":
+                os.environ["LDFLAGS"] += " -lunwind"
+
+ os.environ["CXX"] = spec['mpi'].mpicxx + " -DUSE_MPI=1"
+ os.environ["PREFIX"] = prefix
+ make()
+ make("install")
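Since the patched Makefile (shown next) no longer hard-codes CXX or LDFLAGS, an equivalent install step could pass these as make variables rather than mutating os.environ. A sketch under that assumption, not the package's code:

# Hypothetical alternative body for Lulesh.install(); assumes the same
# `from spack import *` and `import os` context as the package above.
def install(self, spec, prefix):
    ldflags = ''
    if '+mpip' in spec:
        ldflags = '-lmpiP -ldwarf -lelf'
        if os.uname()[4] == 'x86_64':
            ldflags += ' -lunwind'

    make('CXX={0} -DUSE_MPI=1'.format(spec['mpi'].mpicxx),
         'LDFLAGS={0}'.format(ldflags),
         'PREFIX={0}'.format(prefix))
    make('install', 'PREFIX={0}'.format(prefix))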
diff --git a/var/spack/repos/builtin/packages/lulesh/remove_defaults.patch b/var/spack/repos/builtin/packages/lulesh/remove_defaults.patch
new file mode 100644
index 0000000000..36cce25ba1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lulesh/remove_defaults.patch
@@ -0,0 +1,60 @@
+--- a/Makefile
++++ b/Makefile
+@@ -1,17 +1,9 @@
+ #default build suggestion of MPI + OPENMP with gcc on Livermore machines you might have to change the compiler name
+
+-SHELL = /bin/sh
+ .SUFFIXES: .cc .o
+
+ LULESH_EXEC = lulesh2.0
+
+-MPI_INC = /opt/local/include/openmpi
+-MPI_LIB = /opt/local/lib
+-
+-SERCXX = g++ -DUSE_MPI=0
+-MPICXX = mpig++ -DUSE_MPI=1
+-CXX = $(MPICXX)
+-
+ SOURCES2.0 = \
+ lulesh.cc \
+ lulesh-comm.cc \
+@@ -20,28 +12,6 @@
+ lulesh-init.cc
+ OBJECTS2.0 = $(SOURCES2.0:.cc=.o)
+
+-#Default build suggestions with OpenMP for g++
+-CXXFLAGS = -g -O3 -fopenmp -I. -Wall
+-LDFLAGS = -g -O3 -fopenmp
+-
+-#Below are reasonable default flags for a serial build
+-#CXXFLAGS = -g -O3 -I. -Wall
+-#LDFLAGS = -g -O3
+-
+-#common places you might find silo on the Livermore machines.
+-#SILO_INCDIR = /opt/local/include
+-#SILO_LIBDIR = /opt/local/lib
+-#SILO_INCDIR = ./silo/4.9/1.8.10.1/include
+-#SILO_LIBDIR = ./silo/4.9/1.8.10.1/lib
+-
+-#If you do not have silo and visit you can get them at:
+-#silo: https://wci.llnl.gov/codes/silo/downloads.html
+-#visit: https://wci.llnl.gov/codes/visit/download.html
+-
+-#below is and example of how to make with silo, hdf5 to get vizulization by default all this is turned off. All paths are Livermore specific.
+-#CXXFLAGS = -g -DVIZ_MESH -I${SILO_INCDIR} -Wall -Wno-pragmas
+-#LDFLAGS = -g -L${SILO_LIBDIR} -Wl,-rpath -Wl,${SILO_LIBDIR} -lsiloh5 -lhdf5
+-
+ .cc.o: lulesh.h
+ @echo "Building $<"
+ $(CXX) -c $(CXXFLAGS) -o $@ $<
+@@ -56,6 +26,7 @@
+ /bin/rm -f *.o *~ $(OBJECTS) $(LULESH_EXEC)
+ /bin/rm -rf *.dSYM
+
+-tar: clean
+- cd .. ; tar cvf lulesh-2.0.tar LULESH-2.0 ; mv lulesh-2.0.tar LULESH-2.0
+-
++install: lulesh2.0
++ @echo "Installing"
++ mkdir -p $(PREFIX)/bin
++ install --mode=755 lulesh2.0 $(PREFIX)/bin/
diff --git a/var/spack/repos/builtin/packages/lwgrp/package.py b/var/spack/repos/builtin/packages/lwgrp/package.py
index 471098c873..9322d69b9b 100644
--- a/var/spack/repos/builtin/packages/lwgrp/package.py
+++ b/var/spack/repos/builtin/packages/lwgrp/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Lwgrp(Package):
"""Thie light-weight group library provides process group
representations using O(log N) space and time."""
diff --git a/var/spack/repos/builtin/packages/lwm2/package.py b/var/spack/repos/builtin/packages/lwm2/package.py
index 340474b47e..063204b84a 100644
--- a/var/spack/repos/builtin/packages/lwm2/package.py
+++ b/var/spack/repos/builtin/packages/lwm2/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Lwm2(Package):
"""LWM2: Light Weight Measurement Module. This is a PMPI module
that can collect a number of time-sliced MPI and POSIX I/O
diff --git a/var/spack/repos/builtin/packages/lz4/package.py b/var/spack/repos/builtin/packages/lz4/package.py
new file mode 100644
index 0000000000..de7e566e70
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lz4/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Lz4(Package):
+    """LZ4 is a lossless compression algorithm, providing compression speed
+ at 400 MB/s per core, scalable with multi-cores CPU. It also features
+ an extremely fast decoder, with speed in multiple GB/s per core,
+ typically reaching RAM speed limits on multi-core systems."""
+
+ homepage = "http://cyan4973.github.io/lz4/"
+ url = "https://github.com/Cyan4973/lz4/archive/r131.tar.gz"
+
+ version('131', '42b09fab42331da9d3fb33bd5c560de9')
+
+ # depends_on('valgrind', type='test')
+
+ def install(self, spec, prefix):
+ make()
+ if self.run_tests:
+ make('test') # requires valgrind to be installed
+ make('install', 'PREFIX={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/lzma/package.py b/var/spack/repos/builtin/packages/lzma/package.py
new file mode 100644
index 0000000000..3eb97a2d9f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lzma/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Lzma(AutotoolsPackage):
+ """LZMA Utils are legacy data compression software with high compression
+ ratio. LZMA Utils are no longer developed, although critical bugs may be
+ fixed as long as fixing them doesn't require huge changes to the code.
+
+ Users of LZMA Utils should move to XZ Utils. XZ Utils support the legacy
+ .lzma format used by LZMA Utils, and can also emulate the command line
+ tools of LZMA Utils. This should make transition from LZMA Utils to XZ
+ Utils relatively easy."""
+
+ homepage = "http://tukaani.org/lzma/"
+ url = "http://tukaani.org/lzma/lzma-4.32.7.tar.gz"
+
+ version('4.32.7', '2a748b77a2f8c3cbc322dbd0b4c9d06a')
diff --git a/var/spack/repos/builtin/packages/lzo/package.py b/var/spack/repos/builtin/packages/lzo/package.py
new file mode 100644
index 0000000000..e9c98842f4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/lzo/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Lzo(AutotoolsPackage):
+ """Real-time data compression library"""
+
+ homepage = 'https://www.oberhumer.com/opensource/lzo/'
+ url = 'http://www.oberhumer.com/opensource/lzo/download/lzo-2.09.tar.gz'
+
+ version('2.09', 'c7ffc9a103afe2d1bba0b015e7aa887f')
+ version('2.08', 'fcec64c26a0f4f4901468f360029678f')
+ version('2.07', '4011935e95171e78ad4894f7335c982a')
+ version('2.06', '95380bd4081f85ef08c5209f4107e9f8')
+ version('2.05', 'c67cda5fa191bab761c7cb06fe091e36')
+
+ def configure_args(self):
+ return [
+ '--disable-dependency-tracking',
+ '--enable-shared'
+ ]
diff --git a/var/spack/repos/builtin/packages/m4/package.py b/var/spack/repos/builtin/packages/m4/package.py
index dcb306dcd3..ebfbc28612 100644
--- a/var/spack/repos/builtin/packages/m4/package.py
+++ b/var/spack/repos/builtin/packages/m4/package.py
@@ -24,8 +24,10 @@
##############################################################################
from spack import *
-class M4(Package):
+
+class M4(AutotoolsPackage):
"""GNU M4 is an implementation of the traditional Unix macro processor."""
+
homepage = "https://www.gnu.org/software/m4/m4.html"
url = "ftp://ftp.gnu.org/gnu/m4/m4-1.4.17.tar.gz"
@@ -33,17 +35,25 @@ class M4(Package):
patch('pgi.patch', when='@1.4.17')
- variant('sigsegv', default=True, description="Build the libsigsegv dependency")
+ variant('sigsegv', default=True,
+ description="Build the libsigsegv dependency")
depends_on('libsigsegv', when='+sigsegv')
- def install(self, spec, prefix):
- configure_args = []
- if 'libsigsegv' in spec:
- configure_args.append('--with-libsigsegv-prefix=%s' % spec['libsigsegv'].prefix)
+ def configure_args(self):
+ spec = self.spec
+ args = ['--enable-c++']
+
+ if '+sigsegv' in spec:
+ args.append('--with-libsigsegv-prefix={0}'.format(
+ spec['libsigsegv'].prefix))
else:
- configure_args.append('--without-libsigsegv-prefix')
+ args.append('--without-libsigsegv-prefix')
+
+ # http://lists.gnu.org/archive/html/bug-m4/2016-09/msg00002.html
+ arch = spec.architecture
+ if (arch.platform == 'darwin' and arch.platform_os == 'sierra' and
+ '%gcc' in spec):
+ args.append('ac_cv_type_struct_sched_param=yes')
- configure("--prefix=%s" % prefix, *configure_args)
- make()
- make("install")
+ return args
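The m4 change above is the AutotoolsPackage migration pattern: the base class supplies the configure/build/install phases (including --prefix), and the package only overrides configure_args(). A minimal, hypothetical package of the same shape (name, URL and checksum are placeholders):

from spack import *


class Example(AutotoolsPackage):
    """Hypothetical autotools package sketching the configure_args() hook."""

    homepage = "http://example.com"
    url = "http://example.com/example-1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder md5

    variant('foo', default=True, description='Enable foo support')

    def configure_args(self):
        # --prefix is added by the base class; only extra flags go here.
        if '+foo' in self.spec:
            return ['--enable-foo']
        return ['--disable-foo']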
diff --git a/var/spack/repos/builtin/packages/mafft/package.py b/var/spack/repos/builtin/packages/mafft/package.py
new file mode 100644
index 0000000000..131b8c58f9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mafft/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mafft(Package):
+ """MAFFT is a multiple sequence alignment program for unix-like
+ operating systems. It offers a range of multiple alignment
+ methods, L-INS-i (accurate; for alignment of <~200 sequences),
+ FFT-NS-2 (fast; for alignment of <~30,000 sequences), etc."""
+
+ homepage = "http://mafft.cbrc.jp/alignment/software/index.html"
+ url = "http://mafft.cbrc.jp/alignment/software/mafft-7.221-with-extensions-src.tgz"
+
+ version('7.221', 'b1aad911e51024d631722a2e061ba215')
+
+ def install(self, spec, prefix):
+ with working_dir('core'):
+ make('PREFIX=%s' % prefix)
+ make('PREFIX=%s' % prefix, 'install')
diff --git a/var/spack/repos/builtin/packages/magics/no_hardcoded_python.patch b/var/spack/repos/builtin/packages/magics/no_hardcoded_python.patch
new file mode 100644
index 0000000000..e2e2a5d1ba
--- /dev/null
+++ b/var/spack/repos/builtin/packages/magics/no_hardcoded_python.patch
@@ -0,0 +1,5 @@
+--- a/tools/xml2mv.py 2016-06-27 17:49:27.000000000 +0200
++++ a/tools/xml2mv.py 2016-09-13 16:25:17.246960456 +0200
+@@ -1 +1 @@
+-#!/usr/bin/python
++#!/usr/bin/env python
diff --git a/var/spack/repos/builtin/packages/magics/package.py b/var/spack/repos/builtin/packages/magics/package.py
new file mode 100644
index 0000000000..cd793ae051
--- /dev/null
+++ b/var/spack/repos/builtin/packages/magics/package.py
@@ -0,0 +1,115 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Magics(Package):
+ """Magics is the latest generation of the ECMWF's Meteorological plotting
+ software MAGICS. Although completely redesigned in C++, it is intended
+ to be as backwards-compatible as possible with the Fortran interface."""
+
+ homepage = "https://software.ecmwf.int/wiki/display/MAGP/Magics"
+ url = "https://software.ecmwf.int/wiki/download/attachments/3473464/Magics-2.29.0-Source.tar.gz"
+
+ # Maintainers of Magics do not keep tarballs of minor releases. Once the
+    # next minor release is published, the previous one becomes unavailable.
+ # That is why the preferred version is the latest stable one.
+ version('2.29.4', '91c561f413316fb665b3bb563f3878d1')
+ version('2.29.0', 'db20a4d3c51a2da5657c31ae3de59709', preferred=True)
+
+    # The patch replaces the hardcoded python path in the shebang so that the
+    # first python installation found in $PATH is used instead
+ patch('no_hardcoded_python.patch')
+
+ # The patch reorders includes and adds namespaces where necessary to
+ # resolve ambiguity of invocations of isnan and isinf functions. The
+    # patch is not needed since version 2.29.1
+ patch('resolve_isnan_ambiguity.patch', when='@2.29.0')
+
+ variant('bufr', default=False, description='Enable BUFR support')
+ variant('netcdf', default=False, description='Enable NetCDF support')
+ variant('cairo', default=True, description='Enable cairo support[png/jpeg]')
+ variant('metview', default=False, description='Enable metview support')
+ variant('qt', default=False, description='Enable metview support with qt')
+
+ depends_on('cmake', type='build')
+ depends_on('pkg-config', type='build')
+
+    # Currently python is only needed to run
+    # the build-time preprocessing scripts.
+ depends_on('python', type='build')
+ depends_on('grib-api')
+ depends_on('proj')
+ depends_on('boost')
+ depends_on('expat')
+ depends_on('pango', when='+cairo')
+ depends_on('netcdf-cxx', when='+netcdf')
+ depends_on('libemos', when='+bufr')
+ depends_on('qt', when='+metview+qt')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+ options.append('-DENABLE_ODB=OFF')
+ options.append('-DENABLE_PYTHON=OFF')
+ options.append('-DBOOST_ROOT=%s' % spec['boost'].prefix)
+ options.append('-DPROJ4_PATH=%s' % spec['proj'].prefix)
+ options.append('-DGRIB_API_PATH=%s' % spec['grib-api'].prefix)
+ options.append('-DENABLE_TESTS=OFF')
+
+ if '+bufr' in spec:
+ options.append('-DENABLE_BUFR=ON')
+ options.append('-DLIBEMOS_PATH=%s' % spec['libemos'].prefix)
+ else:
+ options.append('-DENABLE_BUFR=OFF')
+
+ if '+netcdf' in spec:
+ options.append('-DENABLE_NETCDF=ON')
+ options.append('-DNETCDF_PATH=%s' % spec['netcdf-cxx'].prefix)
+ else:
+ options.append('-DENABLE_NETCDF=OFF')
+
+ if '+cairo' in spec:
+ options.append('-DENABLE_CAIRO=ON')
+ else:
+ options.append('-DENABLE_CAIRO=OFF')
+
+ if '+metview' in spec:
+ if '+qt' in spec:
+ options.append('-DENABLE_METVIEW=ON')
+ if spec['qt'].version.up_to(1) == '5':
+ options.append('-DENABLE_QT5=ON')
+ else:
+ options.append('-DENABLE_METVIEW_NO_QT=ON')
+ else:
+ options.append('-DENABLE_METVIEW=OFF')
+
+ if (self.compiler.f77 is None) or (self.compiler.fc is None):
+ options.append('-DENABLE_FORTRAN=OFF')
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
+ make()
+ make('install')
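The install method above repeats the '+variant' to -DENABLE_X=ON/OFF translation several times; a small helper along these lines (an assumption, not part of the package) would condense it:

# Hypothetical helper: map a boolean variant to a CMake ENABLE flag.
def enable_flag(spec, variant, cmake_name):
    state = 'ON' if ('+' + variant) in spec else 'OFF'
    return '-DENABLE_{0}={1}'.format(cmake_name, state)

# e.g. inside install():
#     options.append(enable_flag(spec, 'netcdf', 'NETCDF'))
#     options.append(enable_flag(spec, 'cairo', 'CAIRO'))
#     options.append(enable_flag(spec, 'bufr', 'BUFR'))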
diff --git a/var/spack/repos/builtin/packages/magics/resolve_isnan_ambiguity.patch b/var/spack/repos/builtin/packages/magics/resolve_isnan_ambiguity.patch
new file mode 100644
index 0000000000..54b96ae88e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/magics/resolve_isnan_ambiguity.patch
@@ -0,0 +1,73 @@
+--- a/src/common/Polyline.cc 2016-04-28 14:38:09.000000000 +0200
++++ b/src/common/Polyline.cc 2016-09-14 13:31:35.784617803 +0200
+@@ -31,2 +30,0 @@
+-#include "TeCoord2D.h"
+-#include "TeGeometryAlgorithms.h"
+--- a/src/decoders/GribRegularInterpretor.cc 2016-04-28 14:38:09.000000000 +0200
++++ b/src/decoders/GribRegularInterpretor.cc 2016-09-14 13:43:41.673614590 +0200
+@@ -2083,2 +2083,2 @@
+- if (isnan(val1)) {
+- if (isnan(val2)) {
++ if (std::isnan(val1)) {
++ if (std::isnan(val2)) {
+@@ -2090 +2090 @@
+- if (isnan(val2)) {
++ if (std::isnan(val2)) {
+@@ -2101 +2101 @@
+- if (isnan(val) || isinf(val) || isinf(-val)) {
++ if (std::isnan(val) || std::isinf(val) || std::isinf(-val)) {
+@@ -2105 +2105 @@
+- if (isnan(val))
++ if (std::isnan(val))
+--- a/src/decoders/GribSatelliteInterpretor.cc 2016-04-28 14:38:09.000000000 +0200
++++ b/src/decoders/GribSatelliteInterpretor.cc 2016-09-14 13:48:55.243699910 +0200
+@@ -33,5 +32,0 @@
+-#include "TeProjection.h"
+-#include "TeDataTypes.h"
+-#include "TeRasterParams.h"
+-#include "TeDecoderMemory.h"
+-#include "TeRasterRemap.h"
+--- a/src/decoders/NetcdfGeoMatrixInterpretor.cc 2016-04-28 14:38:09.000000000 +0200
++++ b/src/decoders/NetcdfGeoMatrixInterpretor.cc 2016-09-14 13:52:37.481201085 +0200
+@@ -93 +93 @@
+- if ( !isnan(*d) ) {
++ if ( !std::isnan(*d) ) {
+--- a/src/decoders/NetcdfOrcaInterpretor.cc 2016-04-28 14:38:09.000000000 +0200
++++ b/src/decoders/NetcdfOrcaInterpretor.cc 2016-09-14 13:51:16.248650570 +0200
+@@ -210,2 +210,2 @@
+- if ( isnan(val1) ) {
+- if ( isnan(val2) ) {
++ if ( std::isnan(val1) ) {
++ if ( std::isnan(val2) ) {
+@@ -218 +218 @@
+- if ( isnan(val2) ) {
++ if ( std::isnan(val2) ) {
+@@ -226 +226 @@
+- if (isnan(val) || isinf(val) || isinf(-val) ) {
++ if (std::isnan(val) || std::isinf(val) || std::isinf(-val) ) {
+@@ -230 +230 @@
+- if (isnan(val) ) val = missing;
++ if (std::isnan(val) ) val = missing;
+@@ -296 +296 @@
+- if (isnan(value) )
++ if (std::isnan(value) )
+--- a/src/terralib/kernel/TeCentroid.cpp 2016-04-28 14:38:09.000000000 +0200
++++ b/src/terralib/kernel/TeCentroid.cpp 2016-09-14 14:17:31.675996554 +0200
+@@ -23,0 +24,2 @@
++#include "TeGeometryAlgorithms.h"
++
+@@ -30 +31,0 @@
+-#include "TeGeometryAlgorithms.h"
+--- a/src/terralib/kernel/TeDatabase.h 2014-11-07 17:39:24.000000000 +0100
++++ b/src/terralib/kernel/TeDatabase.h 2016-09-14 14:20:01.041100590 +0200
+@@ -33,0 +34 @@
++#include "TeGeometry.h"
+@@ -38 +38,0 @@
+-#include "TeGeometry.h"
+--- a/src/terralib/kernel/TeOverlayUtils.h 2014-11-07 17:39:24.000000000 +0100
++++ b/src/terralib/kernel/TeOverlayUtils.h 2016-09-14 14:21:51.649920405 +0200
+@@ -37,0 +38,2 @@
++#include "TeGeometry.h"
++
+@@ -44 +45,0 @@
+-#include "TeGeometry.h"
diff --git a/var/spack/repos/builtin/packages/makedepend/package.py b/var/spack/repos/builtin/packages/makedepend/package.py
new file mode 100644
index 0000000000..5675793abc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/makedepend/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Makedepend(Package):
+ """makedepend - create dependencies in makefiles."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/util/makedepend"
+ url = "https://www.x.org/archive/individual/util/makedepend-1.0.5.tar.gz"
+
+ version('1.0.5', 'efb2d7c7e22840947863efaedc175747')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/mariadb/package.py b/var/spack/repos/builtin/packages/mariadb/package.py
new file mode 100644
index 0000000000..d9df200d02
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mariadb/package.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mariadb(Package):
+ """MariaDB turns data into structured information in a wide array of
+ applications, ranging from banking to websites. It is an enhanced, drop-in
+ replacement for MySQL. MariaDB is used because it is fast, scalable and
+ robust, with a rich ecosystem of storage engines, plugins and many other
+    tools that make it very versatile for a wide variety of use cases."""
+
+ homepage = "https://mariadb.org/about/"
+ url = "https://downloads.mariadb.org/f/mariadb-10.1.14/source/mariadb-10.1.14.tar.gz"
+
+ version('10.1.14', '294925531e0fd2f0461e3894496a5adc')
+ version('5.5.49', '67b5a499a5f158b2a586e6e3bfb4f304')
+
+    variant('nonblocking', default=True, description='Allow non-blocking '
+            'operations in the mariadb client library.')
+
+ depends_on('boost')
+ depends_on('cmake')
+ depends_on('jemalloc')
+ depends_on('libaio')
+ depends_on('libedit')
+ depends_on('libevent', when='+nonblocking')
+ depends_on('ncurses')
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+
+ cmake('..', *std_cmake_args)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/matio/package.py b/var/spack/repos/builtin/packages/matio/package.py
index c141f7e8af..a33b23a4e9 100644
--- a/var/spack/repos/builtin/packages/matio/package.py
+++ b/var/spack/repos/builtin/packages/matio/package.py
@@ -25,15 +25,26 @@
from spack import *
-class Matio(Package):
+class Matio(AutotoolsPackage):
"""matio is an C library for reading and writing Matlab MAT files"""
homepage = "http://sourceforge.net/projects/matio/"
- url = "http://downloads.sourceforge.net/project/matio/matio/1.5.2/matio-1.5.2.tar.gz"
+ url = "http://downloads.sourceforge.net/project/matio/matio/1.5.9/matio-1.5.9.tar.gz"
+ version('1.5.9', 'aab5b4219a3c0262afe7eeb7bdd2f463')
version('1.5.2', '85b007b99916c63791f28398f6a4c6f1')
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
+ variant("zlib", default=True,
+ description='support for compressed mat files')
+ variant("hdf5", default=True,
+ description='support for version 7.3 mat files via hdf5')
- make()
- make("install")
+ depends_on("zlib", when="+zlib")
+ depends_on("hdf5", when="+hdf5")
+
+ def configure_args(self):
+ args = []
+ if '+zlib' in self.spec:
+ args.append("--with-zlib=%s" % self.spec['zlib'].prefix)
+ if '+hdf5' in self.spec:
+ args.append("--with-hdf5=%s" % self.spec['hdf5'].prefix)
+ return args
diff --git a/var/spack/repos/builtin/packages/maven/package.py b/var/spack/repos/builtin/packages/maven/package.py
new file mode 100644
index 0000000000..c4e0a1d0a4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/maven/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from distutils.dir_util import copy_tree
+
+
+class Maven(Package):
+ """Apache Maven is a software project management and comprehension tool."""
+
+ homepage = "https://maven.apache.org/index.html"
+ url = "http://www.gtlib.gatech.edu/pub/apache/maven/maven-3/3.3.9/binaries/apache-maven-3.3.9-bin.tar.gz"
+
+ version('3.3.9', '516923b3955b6035ba6b0a5b031fbd8b')
+
+ depends_on('jdk')
+
+ def install(self, spec, prefix):
+ # install pre-built distribution
+ copy_tree('.', prefix)
diff --git a/var/spack/repos/builtin/packages/mbedtls/package.py b/var/spack/repos/builtin/packages/mbedtls/package.py
index ae34d25691..493ea59f0b 100644
--- a/var/spack/repos/builtin/packages/mbedtls/package.py
+++ b/var/spack/repos/builtin/packages/mbedtls/package.py
@@ -24,20 +24,25 @@
##############################################################################
from spack import *
+
class Mbedtls(Package):
- """
- mbed TLS (formerly known as PolarSSL) makes it trivially easy for developers to include cryptographic and SSL/TLS capabilities in their (embedded) products, facilitating this functionality with a minimal coding footprint.
+ """mbed TLS (formerly known as PolarSSL) makes it trivially easy for
+ developers to include cryptographic and SSL/TLS capabilities in
+ their (embedded) products, facilitating this functionality with a
+ minimal coding footprint.
+
"""
homepage = "https://tls.mbed.org"
url = "https://github.com/ARMmbed/mbedtls/archive/mbedtls-2.2.1.tar.gz"
- version('2.2.1' , '73a38f96898d6d03e32f55dd9f9a67be')
- version('2.2.0' , 'eaf4586c1ef93ae872e606b6c1203942')
- version('2.1.4' , '40cdf67b6c6d92c9cbcfd552d39ea3ae')
- version('2.1.3' , '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
+ version('2.3.0', '98158e1160a0825a3e8db38881a177a0')
+ version('2.2.1', '73a38f96898d6d03e32f55dd9f9a67be')
+ version('2.2.0', 'eaf4586c1ef93ae872e606b6c1203942')
+ version('2.1.4', '40cdf67b6c6d92c9cbcfd552d39ea3ae')
+ version('2.1.3', '7eb4cf1dfa68578a2c8dbd0b6fa752dd')
version('1.3.16', '4144d7320c691f721aeb9e67a1bc38e0')
- depends_on('cmake')
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/meep/package.py b/var/spack/repos/builtin/packages/meep/package.py
new file mode 100644
index 0000000000..2c1018e711
--- /dev/null
+++ b/var/spack/repos/builtin/packages/meep/package.py
@@ -0,0 +1,109 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Meep(Package):
+ """Meep (or MEEP) is a free finite-difference time-domain (FDTD) simulation
+ software package developed at MIT to model electromagnetic systems."""
+
+ homepage = "http://ab-initio.mit.edu/wiki/index.php/Meep"
+
+ version('1.3', '18a5b9e18008627a0411087e0bb60db5')
+ version('1.2.1', '9be2e743c3a832ae922de9d955d016c5')
+ version('1.1.1', '415e0cd312b6caa22b5dd612490e1ccf')
+
+ variant('blas', default=True, description='Enable BLAS support')
+ variant('lapack', default=True, description='Enable LAPACK support')
+ variant('harminv', default=True, description='Enable Harminv support')
+    variant('guile', default=True, description='Enable Guile support')
+ variant('libctl', default=True, description='Enable libctl support')
+ variant('mpi', default=True, description='Enable MPI support')
+ variant('hdf5', default=True, description='Enable HDF5 support')
+ variant('gsl', default=True, description='Enable GSL support')
+
+ depends_on('blas', when='+blas')
+ depends_on('lapack', when='+lapack')
+ depends_on('harminv', when='+harminv')
+ depends_on('guile', when='+guile')
+ depends_on('libctl@3.2:', when='+libctl')
+ depends_on('mpi', when='+mpi')
+ depends_on('hdf5~mpi', when='+hdf5~mpi')
+ depends_on('hdf5+mpi', when='+hdf5+mpi')
+ depends_on('gsl', when='+gsl')
+
+ def url_for_version(self, version):
+ base_url = "http://ab-initio.mit.edu/meep"
+ if version > Version('1.1.1'):
+ return "{0}/meep-{1}.tar.gz".format(base_url, version)
+ else:
+ return "{0}/old/meep-{1}.tar.gz".format(base_url, version)
+
+ def install(self, spec, prefix):
+ config_args = [
+ '--prefix={0}'.format(prefix),
+ '--enable-shared'
+ ]
+
+ if '+blas' in spec:
+ config_args.append('--with-blas={0}'.format(
+ spec['blas'].prefix.lib))
+ else:
+ config_args.append('--without-blas')
+
+ if '+lapack' in spec:
+ config_args.append('--with-lapack={0}'.format(
+ spec['lapack'].prefix.lib))
+ else:
+ config_args.append('--without-lapack')
+
+ if '+libctl' in spec:
+ config_args.append('--with-libctl={0}'.format(
+ join_path(spec['libctl'].prefix.share, 'libctl')))
+ else:
+ config_args.append('--without-libctl')
+
+ if '+mpi' in spec:
+ config_args.append('--with-mpi')
+ else:
+ config_args.append('--without-mpi')
+
+ if '+hdf5' in spec:
+ config_args.append('--with-hdf5')
+ else:
+ config_args.append('--without-hdf5')
+
+ configure(*config_args)
+
+ make()
+
+ # aniso_disp test fails unless installed with harminv
+ # near2far test fails unless installed with gsl
+ if self.run_tests and '+harminv' in spec and '+gsl' in spec:
+ # Most tests fail when run in parallel
+ # 2D_convergence tests still fails to converge for unknown reasons
+ make('check', parallel=False)
+
+ make('install')
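The url_for_version above branches on a Version comparison: anything newer than 1.1.1 is fetched from the top-level directory, everything else from old/. A standalone sketch of the same selection using plain tuples in place of Spack's Version (values mirror the versions listed above):

# Tuple comparison stands in for `version > Version('1.1.1')`.
def meep_url(parts):
    base = "http://ab-initio.mit.edu/meep"
    subdir = "" if parts > (1, 1, 1) else "old/"
    return "{0}/{1}meep-{2}.tar.gz".format(
        base, subdir, ".".join(str(p) for p in parts))

print(meep_url((1, 3)))      # .../meep/meep-1.3.tar.gz
print(meep_url((1, 1, 1)))   # .../meep/old/meep-1.1.1.tar.gz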
diff --git a/var/spack/repos/builtin/packages/memaxes/package.py b/var/spack/repos/builtin/packages/memaxes/package.py
index 135384e2f7..ffad167788 100644
--- a/var/spack/repos/builtin/packages/memaxes/package.py
+++ b/var/spack/repos/builtin/packages/memaxes/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Memaxes(Package):
"""MemAxes is a visualizer for sampled memory trace data."""
@@ -32,7 +33,7 @@ class Memaxes(Package):
version('0.5', '5874f3fda9fd2d313c0ff9684f915ab5',
url='https://github.com/llnl/MemAxes/archive/v0.5.tar.gz')
- depends_on("cmake@2.8.9:")
+ depends_on('cmake@2.8.9:', type='build')
depends_on("qt@5:")
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/mercurial/package.py b/var/spack/repos/builtin/packages/mercurial/package.py
new file mode 100644
index 0000000000..ea77953f15
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mercurial/package.py
@@ -0,0 +1,69 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import llnl.util.tty as tty
+import os
+
+
+class Mercurial(Package):
+ """Mercurial is a free, distributed source control management tool."""
+
+ homepage = "https://www.mercurial-scm.org"
+ url = "https://www.mercurial-scm.org/release/mercurial-3.9.tar.gz"
+
+ version('3.9.1', '3759dd10edb8c1a6dfb8ff0ce82658ce')
+ version('3.9', 'e2b355da744e94747daae3a5339d28a0')
+ version('3.8.4', 'cec2c3db688cb87142809089c6ae13e9')
+ version('3.8.3', '97aced7018614eeccc9621a3dea35fda')
+ version('3.8.2', 'c38daa0cbe264fc621dc3bb05933b0b3')
+ version('3.8.1', '172a8c588adca12308c2aca16608d7f4')
+
+ extends('python')
+ depends_on('python@2.6:2.8')
+ depends_on('py-docutils', type='build')
+
+ def install(self, spec, prefix):
+ make('install', 'PREFIX={0}'.format(prefix))
+
+ # Configuration of HTTPS certificate authorities
+ # https://www.mercurial-scm.org/wiki/CACertificates
+ hgrc_filename = join_path(prefix.etc, 'mercurial', 'hgrc')
+ mkdirp(os.path.dirname(hgrc_filename))
+
+ with open(hgrc_filename, 'w') as hgrc:
+ if os.path.exists('/etc/ssl/certs/ca-certificates.crt'):
+ # Debian/Ubuntu/Gentoo/Arch Linux
+ hgrc.write('[web]\ncacerts = /etc/ssl/certs/ca-certificates.crt') # noqa
+ elif os.path.exists('/etc/pki/tls/certs/ca-bundle.crt'):
+ # Fedora/RHEL/CentOS
+ hgrc.write('[web]\ncacerts = /etc/pki/tls/certs/ca-bundle.crt')
+ elif os.path.exists('/etc/ssl/ca-bundle.pem'):
+ # openSUSE/SLE
+ hgrc.write('[web]\ncacerts = /etc/ssl/ca-bundle.pem')
+ else:
+ tty.warn('CA certificate not found. You may not be able to '
+ 'connect to an HTTPS server. If your CA certificate '
+ 'is in a non-standard location, you should add it to '
+ '{0}'.format(hgrc_filename))
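The hgrc logic above probes a fixed list of well-known CA bundle locations and writes the first one that exists. The same probe as a standalone helper (a sketch; the function name is made up):

import os

# Candidate CA bundles, in the order checked above.
CA_BUNDLES = [
    '/etc/ssl/certs/ca-certificates.crt',   # Debian/Ubuntu/Gentoo/Arch Linux
    '/etc/pki/tls/certs/ca-bundle.crt',     # Fedora/RHEL/CentOS
    '/etc/ssl/ca-bundle.pem',               # openSUSE/SLE
]


def find_ca_bundle(candidates=CA_BUNDLES):
    """Return the first existing CA bundle path, or None."""
    for path in candidates:
        if os.path.exists(path):
            return path
    return None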
diff --git a/var/spack/repos/builtin/packages/mesa/package.py b/var/spack/repos/builtin/packages/mesa/package.py
index bd9c9eec4a..f19bb466fd 100644
--- a/var/spack/repos/builtin/packages/mesa/package.py
+++ b/var/spack/repos/builtin/packages/mesa/package.py
@@ -24,35 +24,44 @@
##############################################################################
from spack import *
+
class Mesa(Package):
"""Mesa is an open-source implementation of the OpenGL
specification - a system for rendering interactive 3D graphics."""
homepage = "http://www.mesa3d.org"
- url = "ftp://ftp.freedesktop.org/pub/mesa/older-versions/8.x/8.0.5/MesaLib-8.0.5.tar.gz"
- # url = "ftp://ftp.freedesktop.org/pub/mesa/10.4.4/MesaLib-10.4.4.tar.gz"
+ url = "ftp://ftp.freedesktop.org/pub/mesa/12.0.3/mesa-12.0.3.tar.gz"
- # version('10.4.4', '8d863a3c209bf5116b2babfccccc68ce')
- version('8.0.5', 'cda5d101f43b8784fa60bdeaca4056f2')
+ version('12.0.3', '60c5f9897ddc38b46f8144c7366e84ad')
- # mesa 7.x, 8.x, 9.x
- depends_on("libdrm@2.4.33")
- depends_on("llvm@3.0")
- depends_on("libxml2+python")
+ # General dependencies
+ depends_on('python@2.6.4:')
+ depends_on('py-mako@0.3.4:')
+ depends_on('flex@2.5.35:', type='build')
+ depends_on('bison@2.4.1:', type='build')
- # patch("llvm-fixes.patch") # using newer llvm
+ # For DRI and hardware acceleration
+ depends_on('libpthread-stubs')
+ depends_on('libdrm')
+ depends_on('openssl')
+ depends_on('libxcb@1.9.3:')
+ depends_on('libxshmfence@1.1:')
+ depends_on('libx11')
+ depends_on('libxext')
+ depends_on('libxdamage')
+ depends_on('libxfixes')
- # mesa 10.x
- # depends_on("py-mako")
- # depends_on("flex")
- # depends_on("bison")
- # depends_on("dri2proto")
- # depends_on("libxcb")
- # depends_on("libxshmfence")
+ depends_on('glproto@1.4.14:', type='build')
+ depends_on('dri2proto@2.6:', type='build')
+ depends_on('dri3proto@1.0:', type='build')
+ depends_on('presentproto@1.0:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ # TODO: Add a package for systemd, which provides libudev.
+ # Installing systemd with the system package manager did not work.
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/metis/package.py b/var/spack/repos/builtin/packages/metis/package.py
index 061179b78e..b66677288c 100644
--- a/var/spack/repos/builtin/packages/metis/package.py
+++ b/var/spack/repos/builtin/packages/metis/package.py
@@ -24,55 +24,86 @@
##############################################################################
from spack import *
-import glob, sys, os
+import glob
+import sys
+import os
+
class Metis(Package):
- """
- METIS is a set of serial programs for partitioning graphs, partitioning finite element meshes, and producing fill
- reducing orderings for sparse matrices. The algorithms implemented in METIS are based on the multilevel
- recursive-bisection, multilevel k-way, and multi-constraint partitioning schemes.
- """
+ """METIS is a set of serial programs for partitioning graphs, partitioning
+ finite element meshes, and producing fill reducing orderings for sparse
+ matrices. The algorithms implemented in METIS are based on the
+ multilevel recursive-bisection, multilevel k-way, and multi-constraint
+ partitioning schemes."""
- homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/metis/overview'
- url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz"
+ homepage = "http://glaros.dtc.umn.edu/gkhome/metis/metis/overview"
+ base_url = "http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis"
- version('5.1.0', '5465e67079419a69e0116de24fce58fe',
- url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/metis-5.1.0.tar.gz')
- version('4.0.3', '5efa35de80703c1b2c4d0de080fafbcf4e0d363a21149a1ad2f96e0144841a55',
- url='http://glaros.dtc.umn.edu/gkhome/fetch/sw/metis/OLD/metis-4.0.3.tar.gz')
+ version('5.1.0', '5465e67079419a69e0116de24fce58fe')
+ version('5.0.2', 'acb521a4e8c2e6dd559a7f9abd0468c5')
+ version('4.0.3', 'd3848b454532ef18dc83e4fb160d1e10')
- variant('shared', default=True, description='Enables the build of shared libraries')
- variant('debug', default=False, description='Builds the library in debug mode')
- variant('gdb', default=False, description='Enables gdb support')
+ variant('shared', default=True, description='Enables the build of shared libraries.')
+ variant('debug', default=False, description='Builds the library in debug mode.')
+ variant('gdb', default=False, description='Enables gdb support.')
- variant('idx64', default=False, description='Use int64_t as default index type')
- variant('double', default=False, description='Use double precision floating point types')
+ variant('int64', default=False, description='Sets the bit width of METIS\'s index type to 64.')
+ variant('real64', default=False, description='Sets the bit width of METIS\'s real type to 64.')
- depends_on('cmake @2.8:', when='@5:') # build-time dependency
- depends_on('gdb', when='+gdb')
+ depends_on('cmake@2.8:', when='@5:', type='build')
patch('install_gklib_defs_rename.patch', when='@5:')
+ def url_for_version(self, version):
+ verdir = 'OLD/' if version < Version('4.0.3') else ''
+ return '%s/%smetis-%s.tar.gz' % (Metis.base_url, verdir, version)
- @when('@4:4.0.3')
- def install(self, spec, prefix):
+ @when('@:4')
+ def patch(self):
+ pass
- if '+gdb' in spec:
- raise InstallError('gdb support not implemented in METIS 4!')
- if '+idx64' in spec:
- raise InstallError('idx64 option not implemented in METIS 4!')
- if '+double' in spec:
- raise InstallError('double option not implemented for METIS 4!')
+ @when('@5:')
+ def patch(self):
+ source_path = self.stage.source_path
+ metis_header = FileFilter(join_path(source_path, 'include', 'metis.h'))
+
+ metis_header.filter(
+ r'(\b)(IDXTYPEWIDTH )(\d+)(\b)',
+ r'\1\2{0}\4'.format('64' if '+int64' in self.spec else '32'),
+ )
+ metis_header.filter(
+ r'(\b)(REALTYPEWIDTH )(\d+)(\b)',
+ r'\1\2{0}\4'.format('64' if '+real64' in self.spec else '32'),
+ )
+
+ # Make clang 7.3 happy.
+ # Prevents "ld: section __DATA/__thread_bss extends beyond end of file"
+ # See upstream LLVM issue https://llvm.org/bugs/show_bug.cgi?id=27059
+ # and https://github.com/Homebrew/homebrew-science/blob/master/metis.rb
+ if self.spec.satisfies('%clang@7.3.0'):
+ filter_file('#define MAX_JBUFS 128', '#define MAX_JBUFS 24',
+ join_path(source_path, 'GKlib', 'error.c'))
+
+ @when('@:4')
+ def install(self, spec, prefix):
+ # Process library spec and options
+ if any('+{0}'.format(v) in spec for v in ['gdb', 'int64', 'real64']):
+ raise InstallError('METIS@:4 does not support the following '
+ 'variants: gdb, int64, real64.')
options = ['COPTIONS=-fPIC']
if '+debug' in spec:
options.append('OPTFLAGS=-g -O0')
make(*options)
+ # Compile and install library files
+ ccompile = Executable(self.compiler.cc)
+
mkdir(prefix.bin)
- for x in ('pmetis', 'kmetis', 'oemetis', 'onmetis', 'partnmesh',
- 'partdmesh', 'mesh2nodal', 'mesh2dual', 'graphchk'):
- install(x, prefix.bin)
+ binfiles = ('pmetis', 'kmetis', 'oemetis', 'onmetis', 'partnmesh',
+ 'partdmesh', 'mesh2nodal', 'mesh2dual', 'graphchk')
+ for binfile in binfiles:
+ install(binfile, prefix.bin)
mkdir(prefix.lib)
install('libmetis.a', prefix.lib)
@@ -82,106 +113,110 @@ class Metis(Package):
install(h, prefix.include)
mkdir(prefix.share)
- for f in (join_path(*p)
- for p in (('Programs', 'io.c'),
- ('Test','mtest.c'),
- ('Graphs','4elt.graph'),
- ('Graphs', 'metis.mesh'),
- ('Graphs', 'test.mgraph'))):
- install(f, prefix.share)
+ sharefiles = (('Graphs', '4elt.graph'), ('Graphs', 'metis.mesh'),
+ ('Graphs', 'test.mgraph'))
+ for sharefile in tuple(join_path(*sf) for sf in sharefiles):
+ install(sharefile, prefix.share)
if '+shared' in spec:
+ shared_flags = ['-fPIC', '-shared']
if sys.platform == 'darwin':
- lib_dsuffix = 'dylib'
- load_flag = '-Wl,-all_load'
- no_load_flag = ''
+ shared_suffix = 'dylib'
+ shared_flags.extend(['-Wl,-all_load', 'libmetis.a'])
else:
- lib_dsuffix = 'so'
- load_flag = '-Wl,-whole-archive'
- no_load_flag = '-Wl,-no-whole-archive'
+ shared_suffix = 'so'
+ shared_flags.extend(['-Wl,-whole-archive', 'libmetis.a',
+ '-Wl,-no-whole-archive'])
- os.system(spack_cc + ' -fPIC -shared ' + load_flag +
- ' libmetis.a ' + no_load_flag + ' -o libmetis.' +
- lib_dsuffix)
- install('libmetis.' + lib_dsuffix, prefix.lib)
+ shared_out = '%s/libmetis.%s' % (prefix.lib, shared_suffix)
+ shared_flags.extend(['-o', shared_out])
- # Set up and run tests on installation
- symlink(join_path(prefix.share, 'io.c'), 'io.c')
- symlink(join_path(prefix.share, 'mtest.c'), 'mtest.c')
- os.system(spack_cc + ' -I%s' % prefix.include + ' -c io.c')
- os.system(spack_cc + ' -I%s' % prefix.include +
- ' -L%s' % prefix.lib + ' -lmetis mtest.c io.o -o mtest')
- _4eltgraph = join_path(prefix.share, '4elt.graph')
- test_mgraph = join_path(prefix.share, 'test.mgraph')
- metis_mesh = join_path(prefix.share, 'metis.mesh')
- kmetis = join_path(prefix.bin, 'kmetis')
- os.system('./mtest ' + _4eltgraph)
- os.system(kmetis + ' ' + _4eltgraph + ' 40')
- os.system(join_path(prefix.bin, 'onmetis') + ' ' + _4eltgraph)
- os.system(join_path(prefix.bin, 'pmetis') + ' ' + test_mgraph + ' 2')
- os.system(kmetis + ' ' + test_mgraph + ' 2')
- os.system(kmetis + ' ' + test_mgraph + ' 5')
- os.system(join_path(prefix.bin, 'partnmesh') + metis_mesh + ' 10')
- os.system(join_path(prefix.bin, 'partdmesh') + metis_mesh + ' 10')
- os.system(join_path(prefix.bin, 'mesh2dual') + metis_mesh)
+ ccompile(*shared_flags)
+ # Set up and run tests on installation
+ ccompile('-I%s' % prefix.include, '-L%s' % prefix.lib,
+ '-Wl,-rpath=%s' % (prefix.lib if '+shared' in spec else ''),
+ join_path('Programs', 'io.o'), join_path('Test', 'mtest.c'),
+ '-o', '%s/mtest' % prefix.bin, '-lmetis', '-lm')
+
+ if self.run_tests:
+ test_bin = lambda testname: join_path(prefix.bin, testname)
+ test_graph = lambda graphname: join_path(prefix.share, graphname)
+
+ graph = test_graph('4elt.graph')
+ os.system('%s %s' % (test_bin('mtest'), graph))
+ os.system('%s %s 40' % (test_bin('kmetis'), graph))
+ os.system('%s %s' % (test_bin('onmetis'), graph))
+ graph = test_graph('test.mgraph')
+ os.system('%s %s 2' % (test_bin('pmetis'), graph))
+ os.system('%s %s 2' % (test_bin('kmetis'), graph))
+ os.system('%s %s 5' % (test_bin('kmetis'), graph))
+ graph = test_graph('metis.mesh')
+ os.system('%s %s 10' % (test_bin('partnmesh'), graph))
+ os.system('%s %s 10' % (test_bin('partdmesh'), graph))
+ os.system('%s %s' % (test_bin('mesh2dual'), graph))
+
+ # FIXME: The following code should replace the testing code in the
+ # block above since it causes installs to fail when one or more of
+ # the Metis tests fail, but it currently doesn't work because the
+ # 'mtest', 'onmetis', and 'partnmesh' tests return error codes that
+ # trigger false positives for failure.
+ """
+ Executable(test_bin('mtest'))(test_graph('4elt.graph'))
+ Executable(test_bin('kmetis'))(test_graph('4elt.graph'), '40')
+ Executable(test_bin('onmetis'))(test_graph('4elt.graph'))
+
+ Executable(test_bin('pmetis'))(test_graph('test.mgraph'), '2')
+ Executable(test_bin('kmetis'))(test_graph('test.mgraph'), '2')
+ Executable(test_bin('kmetis'))(test_graph('test.mgraph'), '5')
+
+ Executable(test_bin('partnmesh'))(test_graph('metis.mesh'), '10')
+ Executable(test_bin('partdmesh'))(test_graph('metis.mesh'), '10')
+ Executable(test_bin('mesh2dual'))(test_graph('metis.mesh'))
+ """
@when('@5:')
def install(self, spec, prefix):
-
- options = []
- options.extend(std_cmake_args)
-
- build_directory = join_path(self.stage.path, 'spack-build')
source_directory = self.stage.source_path
+ build_directory = join_path(source_directory, 'build')
- options.append('-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=source_directory))
+ options = std_cmake_args[:]
+ options.append('-DGKLIB_PATH:PATH=%s/GKlib' % source_directory)
options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
if '+shared' in spec:
options.append('-DSHARED:BOOL=ON')
-
if '+debug' in spec:
options.extend(['-DDEBUG:BOOL=ON',
'-DCMAKE_BUILD_TYPE:STRING=Debug'])
-
if '+gdb' in spec:
options.append('-DGDB:BOOL=ON')
- metis_header = join_path(source_directory, 'include', 'metis.h')
-
- if '+idx64' in spec:
- filter_file('IDXTYPEWIDTH 32', 'IDXTYPEWIDTH 64', metis_header)
-
- if '+double' in spec:
- filter_file('REALTYPEWIDTH 32', 'REALTYPEWIDTH 64', metis_header)
-
- # Make clang 7.3 happy.
- # Prevents "ld: section __DATA/__thread_bss extends beyond end of file"
- # See upstream LLVM issue https://llvm.org/bugs/show_bug.cgi?id=27059
- # Adopted from https://github.com/Homebrew/homebrew-science/blob/master/metis.rb
- if spec.satisfies('%clang@7.3.0'):
- filter_file('#define MAX_JBUFS 128', '#define MAX_JBUFS 24', join_path(source_directory, 'GKlib', 'error.c'))
-
with working_dir(build_directory, create=True):
cmake(source_directory, *options)
make()
- make("install")
- # now run some tests:
- for f in ["4elt", "copter2", "mdual"]:
- graph = join_path(source_directory,'graphs','%s.graph' % f)
- Executable(join_path(prefix.bin,'graphchk'))(graph)
- Executable(join_path(prefix.bin,'gpmetis'))(graph,'2')
- Executable(join_path(prefix.bin,'ndmetis'))(graph)
-
- graph = join_path(source_directory,'graphs','test.mgraph')
- Executable(join_path(prefix.bin,'gpmetis'))(graph,'2')
- graph = join_path(source_directory,'graphs','metis.mesh')
- Executable(join_path(prefix.bin,'mpmetis'))(graph,'2')
+ make('install')
# install GKlib headers, which will be needed for ParMETIS
- GKlib_dist = join_path(prefix.include,'GKlib')
+ GKlib_dist = join_path(prefix.include, 'GKlib')
mkdirp(GKlib_dist)
- fs = glob.glob(join_path(source_directory,'GKlib',"*.h"))
- for f in fs:
- install(f, GKlib_dist)
+ hfiles = glob.glob(join_path(source_directory, 'GKlib', '*.h'))
+ for hfile in hfiles:
+ install(hfile, GKlib_dist)
+
+ if self.run_tests:
+ # FIXME: On some systems, the installed binaries for METIS cannot
+ # be executed without first being read.
+ ls = which('ls')
+ ls('-a', '-l', prefix.bin)
+
+ for f in ['4elt', 'copter2', 'mdual']:
+ graph = join_path(source_directory, 'graphs', '%s.graph' % f)
+ Executable(join_path(prefix.bin, 'graphchk'))(graph)
+ Executable(join_path(prefix.bin, 'gpmetis'))(graph, '2')
+ Executable(join_path(prefix.bin, 'ndmetis'))(graph)
+
+ graph = join_path(source_directory, 'graphs', 'test.mgraph')
+ Executable(join_path(prefix.bin, 'gpmetis'))(graph, '2')
+ graph = join_path(source_directory, 'graphs', 'metis.mesh')
+ Executable(join_path(prefix.bin, 'mpmetis'))(graph, '2')
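For reference, a minimal plain-Python sketch of the header rewrite that the @5: patch() above performs, using re in place of Spack's FileFilter; set_metis_widths and the sample text are illustrative assumptions:

import re


def set_metis_widths(header_text, int64=False, real64=False):
    """Rewrite IDXTYPEWIDTH/REALTYPEWIDTH in metis.h text (sketch)."""
    idx = '64' if int64 else '32'
    real = '64' if real64 else '32'
    header_text = re.sub(r'\b(IDXTYPEWIDTH )\d+\b', r'\g<1>' + idx, header_text)
    header_text = re.sub(r'\b(REALTYPEWIDTH )\d+\b', r'\g<1>' + real, header_text)
    return header_text


sample = '#define IDXTYPEWIDTH 32\n#define REALTYPEWIDTH 32\n'
print(set_metis_widths(sample, int64=True))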
diff --git a/var/spack/repos/builtin/packages/mfem/package.py b/var/spack/repos/builtin/packages/mfem/package.py
index 504efb5e35..45f64cef65 100644
--- a/var/spack/repos/builtin/packages/mfem/package.py
+++ b/var/spack/repos/builtin/packages/mfem/package.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import glob, string
+
class Mfem(Package):
"""Free, lightweight, scalable C++ library for finite element methods."""
@@ -31,19 +31,26 @@ class Mfem(Package):
homepage = 'http://www.mfem.org'
url = 'https://github.com/mfem/mfem'
+ version('3.2',
+ '2938c3deed4ec4f7fd5b5f5cfe656845282e86e2dcd477d292390058b7b94340',
+ url='http://goo.gl/Y9T75B', preferred=True, extension='.tar.gz')
+
+ version('3.1',
+ '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57',
+ url='http://goo.gl/xrScXn', extension='.tar.gz')
# version('3.1', git='https://github.com/mfem/mfem.git',
# commit='dbae60fe32e071989b52efaaf59d7d0eb2a3b574')
- version('3.1', '841ea5cf58de6fae4de0f553b0e01ebaab9cd9c67fa821e8a715666ecf18fc57',
- url='http://goo.gl/xrScXn', expand=False)
-
variant('metis', default=False, description='Activate support for metis')
variant('hypre', default=False, description='Activate support for hypre')
variant('suite-sparse', default=False,
description='Activate support for SuiteSparse')
- variant('mpi', default=False, description='Activate support for MPI')
+ variant('mpi', default=True, description='Activate support for MPI')
+ variant('superlu-dist', default=False,
+ description='Activate support for SuperLU_Dist')
variant('lapack', default=False, description='Activate support for LAPACK')
variant('debug', default=False, description='Build debug version')
+ variant('netcdf', default=False, description='Activate NetCDF support')
depends_on('blas', when='+lapack')
depends_on('lapack', when='+lapack')
@@ -60,7 +67,13 @@ class Mfem(Package):
depends_on('blas', when='+suite-sparse')
depends_on('lapack', when='+suite-sparse')
depends_on('metis@5:', when='+suite-sparse ^suite-sparse@4.5:')
- depends_on('cmake', when='^metis@5:')
+ depends_on('cmake', when='^metis@5:', type='build')
+
+ depends_on('superlu-dist', when='@3.2: +superlu-dist')
+
+ depends_on('netcdf', when='@3.2: +netcdf')
+ depends_on('zlib', when='@3.2: +netcdf')
+ depends_on('hdf5', when='@3.2: +netcdf')
def check_variants(self, spec):
if '+mpi' in spec and ('+hypre' not in spec or '+metis' not in spec):
@@ -69,11 +82,18 @@ class Mfem(Package):
if '+suite-sparse' in spec and ('+metis' not in spec or
'+lapack' not in spec):
raise InstallError('mfem+suite-sparse must be built with ' +
- '+metis and +lapack!')
- if 'metis@5:' in spec and '%clang' in spec and ('^cmake %gcc' not in spec):
+ '+metis and +lapack!')
+ if 'metis@5:' in spec and '%clang' in spec and (
+ '^cmake %gcc' not in spec):
raise InstallError('To work around CMake bug with clang, must ' +
'build mfem with mfem[+variants] %clang ' +
'^cmake %gcc to force CMake to build with gcc')
+ if '@:3.1' in spec and '+superlu-dist' in spec:
+ raise InstallError('MFEM does not support SuperLU_DIST for ' +
+ 'versions 3.1 and earlier')
+ if '@:3.1' in spec and '+netcdf' in spec:
+ raise InstallError('MFEM does not support NetCDF for ' +
+ 'versions 3.1 and earlier')
return
def install(self, spec, prefix):
@@ -82,40 +102,65 @@ class Mfem(Package):
options = ['PREFIX=%s' % prefix]
if '+lapack' in spec:
- lapack_lib = '-L{0} -llapack -L{1} -lblas'.format(
- spec['lapack'].prefix.lib, spec['blas'].prefix.lib)
- options.extend(['MFEM_USE_LAPACK=YES',
- 'LAPACK_OPT=-I%s' % spec['lapack'].prefix.include,
- 'LAPACK_LIB=%s' % lapack_lib])
+ lapack_lib = (spec['lapack'].lapack_libs + spec['blas'].blas_libs).ld_flags # NOQA: ignore=E501
+ options.extend([
+ 'MFEM_USE_LAPACK=YES',
+ 'LAPACK_OPT=-I%s' % spec['lapack'].prefix.include,
+ 'LAPACK_LIB=%s' % lapack_lib])
if '+hypre' in spec:
- options.extend(['HYPRE_DIR=%s' % spec['hypre'].prefix,
- 'HYPRE_OPT=-I%s' % spec['hypre'].prefix.include,
- 'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib +
- ' -lHYPRE'])
-
- if '+metis' in spec:
+ options.extend([
+ 'HYPRE_DIR=%s' % spec['hypre'].prefix,
+ 'HYPRE_OPT=-I%s' % spec['hypre'].prefix.include,
+ 'HYPRE_LIB=-L%s' % spec['hypre'].prefix.lib +
+ ' -lHYPRE'])
+
+ if 'parmetis' in spec:
+ metis_lib = '-L%s -lparmetis -lmetis' % spec['parmetis'].prefix.lib
+ metis_str = 'MFEM_USE_METIS_5=YES'
+ options.extend([metis_str,
+ 'METIS_DIR=%s' % spec['parmetis'].prefix,
+ 'METIS_OPT=-I%s' % spec['parmetis'].prefix.include,
+ 'METIS_LIB=%s' % metis_lib])
+ elif 'metis' in spec:
metis_lib = '-L%s -lmetis' % spec['metis'].prefix.lib
if spec['metis'].satisfies('@5:'):
metis_str = 'MFEM_USE_METIS_5=YES'
else:
metis_str = 'MFEM_USE_METIS_5=NO'
- options.extend([metis_str,
- 'METIS_DIR=%s' % spec['metis'].prefix,
- 'METIS_OPT=-I%s' % spec['metis'].prefix.include,
- 'METIS_LIB=%s' % metis_lib])
-
- if '+mpi' in spec: options.extend(['MFEM_USE_MPI=YES'])
+ options.extend([
+ metis_str,
+ 'METIS_DIR=%s' % spec['metis'].prefix,
+ 'METIS_OPT=-I%s' % spec['metis'].prefix.include,
+ 'METIS_LIB=%s' % metis_lib])
+
+ if 'mpi' in spec:
+ options.extend(['MFEM_USE_MPI=YES'])
+
+ if '+superlu-dist' in spec:
+ superlu_lib = '-L%s' % spec['superlu-dist'].prefix.lib
+ superlu_lib += ' -lsuperlu_dist'
+ sl_inc = 'SUPERLU_OPT=-I%s' % spec['superlu-dist'].prefix.include
+ options.extend(['MFEM_USE_SUPERLU=YES',
+ 'SUPERLU_DIR=%s' % spec['superlu-dist'].prefix,
+ sl_inc,
+ 'SUPERLU_LIB=%s' % superlu_lib])
if '+suite-sparse' in spec:
ssp = spec['suite-sparse'].prefix
ss_lib = '-L%s' % ssp.lib
- ss_lib += (' -lumfpack -lcholmod -lcolamd -lamd -lcamd' +
- ' -lccolamd -lsuitesparseconfig')
+
+ if '@3.2:' in spec:
+ ss_lib += ' -lklu -lbtf'
+
+ ss_lib += (' -lumfpack -lcholmod -lcolamd' +
+ ' -lamd -lcamd -lccolamd -lsuitesparseconfig')
no_librt_archs = ['darwin-i686', 'darwin-x86_64']
- no_rt = any(map(lambda a: spec.satisfies('='+a), no_librt_archs))
- if not no_rt: ss_lib += ' -lrt'
+ no_rt = any(map(lambda a: spec.satisfies('=' + a),
+ no_librt_archs))
+ if not no_rt:
+ ss_lib += ' -lrt'
ss_lib += (' ' + metis_lib + ' ' + lapack_lib)
options.extend(['MFEM_USE_SUITESPARSE=YES',
@@ -123,25 +168,33 @@ class Mfem(Package):
'SUITESPARSE_OPT=-I%s' % ssp.include,
'SUITESPARSE_LIB=%s' % ss_lib])
- if '+debug' in spec: options.extend(['MFEM_DEBUG=YES'])
-
- # Dirty hack to cope with URL redirect
- tgz_file = string.split(self.url,'/')[-1]
- tar = which('tar')
- tar('xzvf', tgz_file)
- cd(glob.glob('mfem*')[0])
- # End dirty hack to cope with URL redirect
+ if '+netcdf' in spec:
+ np = spec['netcdf'].prefix
+ zp = spec['zlib'].prefix
+ h5p = spec['hdf5'].prefix
+ nlib = '-L%s -lnetcdf ' % np.lib
+ nlib += '-L%s -lhdf5_hl -lhdf5 ' % h5p.lib
+ nlib += '-L%s -lz' % zp.lib
+ options.extend(['MFEM_USE_NETCDF=YES',
+ 'NETCDF_DIR=%s' % np,
+ 'HDF5_DIR=%s' % h5p,
+ 'ZLIB_DIR=%s' % zp,
+ 'NETCDF_OPT=-I%s' % np.include,
+ 'NETCDF_LIB=%s' % nlib])
+
+ if '+debug' in spec:
+ options.extend(['MFEM_DEBUG=YES'])
make('config', *options)
make('all')
# Run a small test before installation
- args = ['-m', join_path('data','star.mesh'), '--no-visualization']
+ args = ['-m', join_path('data', 'star.mesh'), '--no-visualization']
if '+mpi' in spec:
Executable(join_path(spec['mpi'].prefix.bin,
'mpirun'))('-np',
'4',
- join_path('examples','ex1p'),
+ join_path('examples', 'ex1p'),
*args)
else:
Executable(join_path('examples', 'ex1'))(*args)
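For reference, a small sketch of the variant-to-option pattern used by install() above; mfem_options, the variants dict, and the prefixes dict are hypothetical stand-ins for a real Spack spec:

def mfem_options(variants, prefixes):
    """Build a list of 'make config' options from enabled variants (sketch)."""
    options = ['PREFIX=%s' % prefixes['mfem']]
    if variants.get('mpi'):
        options.append('MFEM_USE_MPI=YES')
    if variants.get('lapack'):
        options += ['MFEM_USE_LAPACK=YES',
                    'LAPACK_OPT=-I%s/include' % prefixes['lapack']]
    if variants.get('debug'):
        options.append('MFEM_DEBUG=YES')
    return options


print(mfem_options({'mpi': True, 'debug': True}, {'mfem': '/opt/mfem'}))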
diff --git a/var/spack/repos/builtin/packages/Mitos/package.py b/var/spack/repos/builtin/packages/mitos/package.py
index 83aca3dc6a..d577a1b285 100644
--- a/var/spack/repos/builtin/packages/Mitos/package.py
+++ b/var/spack/repos/builtin/packages/mitos/package.py
@@ -41,6 +41,7 @@ class Mitos(Package):
depends_on('dyninst@8.2.1:')
depends_on('hwloc')
depends_on('mpi')
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
diff --git a/var/spack/repos/builtin/packages/mkfontdir/package.py b/var/spack/repos/builtin/packages/mkfontdir/package.py
new file mode 100644
index 0000000000..15c85b24e4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mkfontdir/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mkfontdir(Package):
+ """mkfontdir creates the fonts.dir files needed by the legacy X server
+ core font system. The current implementation is a simple wrapper script
+ around the mkfontscale program, which must be built and installed first."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/mkfontdir"
+ url = "https://www.x.org/archive/individual/app/mkfontdir-1.0.7.tar.gz"
+
+ version('1.0.7', '52a5bc129f3f3ac54e7115608cec3cdc')
+
+ depends_on('mkfontscale', type='run')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/mkfontscale/package.py b/var/spack/repos/builtin/packages/mkfontscale/package.py
new file mode 100644
index 0000000000..4c907831b6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mkfontscale/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mkfontscale(Package):
+ """mkfontscale creates the fonts.scale and fonts.dir index files used by the
+ legacy X11 font system."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/mkfontscale"
+ url = "https://www.x.org/archive/individual/app/mkfontscale-1.1.2.tar.gz"
+
+ version('1.1.2', 'fab4e1598b8948c124ec7a9f06d30e5b')
+
+ depends_on('libfontenc')
+ depends_on('freetype')
+
+ depends_on('xproto@7.0.25:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/moab/package.py b/var/spack/repos/builtin/packages/moab/package.py
index 893a543e97..b783d7b81b 100644
--- a/var/spack/repos/builtin/packages/moab/package.py
+++ b/var/spack/repos/builtin/packages/moab/package.py
@@ -37,7 +37,7 @@ class Moab(Package):
homepage = "https://bitbucket.org/fathomteam/moab"
url = "http://ftp.mcs.anl.gov/pub/fathom/moab-4.6.3.tar.gz"
- version('4.9.1', 'bcb8bee3e58c076c7f31884db119088e')
+ version('4.9.1', '19cc2189fa266181ad9109b18d0b2ab8')
version('4.9.0', '40695d0a159040683cfa05586ad4a7c2')
version('4.8.2', '1dddd10f162fce3cfffaedc48f6f467d')
@@ -45,6 +45,7 @@ class Moab(Package):
description='Required to enable the ExodusII reader/writer.')
variant('shared', default=True,
description='Enables the build of shared libraries')
+ variant('fortran', default=True, description='Enable Fortran support')
# There are many possible variants for MOAB. Here are examples for
# two of them:
@@ -58,7 +59,8 @@ class Moab(Package):
depends_on('hdf5+mpi')
depends_on('netcdf+mpi', when='+netcdf')
depends_on('parmetis')
- depends_on('trilinos') # looking for zoltan.
+ depends_on('zoltan')
+ depends_on('zoltan~fortran', when='~fortran')
def install(self, spec, prefix):
@@ -70,13 +72,15 @@ class Moab(Package):
'--with-mpi=%s' % spec['mpi'].prefix,
'--with-hdf5=%s' % spec['hdf5'].prefix,
'--with-parmetis=%s' % spec['parmetis'].prefix,
- '--with-zoltan=%s' % spec['trilinos'].prefix,
+ '--with-zoltan=%s' % spec['zoltan'].prefix,
'--disable-vtkMOABReader',
'--without-vtk',
'CXX=%s' % spec['mpi'].mpicxx,
'CC=%s' % spec['mpi'].mpicc,
'FC=%s' % spec['mpi'].mpifc]
+ if '~fortran' in spec:
+ options.append('--disable-fortran')
if '+shared' in spec:
options.append('--enable-shared')
if '+netcdf' in spec:
diff --git a/var/spack/repos/builtin/packages/mpc/package.py b/var/spack/repos/builtin/packages/mpc/package.py
index 92eb976f8b..2fe3900981 100644
--- a/var/spack/repos/builtin/packages/mpc/package.py
+++ b/var/spack/repos/builtin/packages/mpc/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Mpc(Package):
"""Gnu Mpc is a C library for the arithmetic of complex numbers
with arbitrarily high precision and correct rounding of the
@@ -34,12 +35,12 @@ class Mpc(Package):
version('1.0.3', 'd6a1d5f8ddea3abd2cc3e98f58352d26')
version('1.0.2', '68fadff3358fb3e7976c7a398a0af4c3')
- depends_on("gmp")
- depends_on("mpfr")
+ depends_on('gmp') # mpir is a drop-in replacement for this
+ depends_on('mpfr') # Could also be built against mpir
def url_for_version(self, version):
if version < Version("1.0.1"):
- return "http://www.multiprecision.org/mpc/download/mpc-%s.tar.gz" % version
+ return "http://www.multiprecision.org/mpc/download/mpc-%s.tar.gz" % version # NOQA
else:
return "ftp://ftp.gnu.org/gnu/mpc/mpc-%s.tar.gz" % version
diff --git a/var/spack/repos/builtin/packages/mpe2/package.py b/var/spack/repos/builtin/packages/mpe2/package.py
index f69ea2d65b..a129d59949 100644
--- a/var/spack/repos/builtin/packages/mpe2/package.py
+++ b/var/spack/repos/builtin/packages/mpe2/package.py
@@ -24,8 +24,9 @@
##############################################################################
from spack import *
+
class Mpe2(Package):
- """Message Passing Extensions (MPE) -- Parallel, shared X window graphics"""
+ """Message Passing Extensions (MPE): Parallel, shared X window graphics"""
homepage = "http://www.mcs.anl.gov/research/projects/perfvis/software/MPE/"
url = "ftp://ftp.mcs.anl.gov/pub/mpi/mpe/mpe2-1.3.0.tar.gz"
diff --git a/var/spack/repos/builtin/packages/mpfr/package.py b/var/spack/repos/builtin/packages/mpfr/package.py
index ed3926a8ab..4612d03849 100644
--- a/var/spack/repos/builtin/packages/mpfr/package.py
+++ b/var/spack/repos/builtin/packages/mpfr/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Mpfr(Package):
"""The MPFR library is a C library for multiple-precision
floating-point computations with correct rounding."""
@@ -34,7 +35,7 @@ class Mpfr(Package):
version('3.1.3', '5fdfa3cfa5c86514ee4a241a1affa138')
version('3.1.2', 'ee2c3ac63bf0c2359bf08fc3ee094c19')
- depends_on('gmp')
+ depends_on('gmp') # mpir is a drop-in replacement for this
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/mpibash/package.py b/var/spack/repos/builtin/packages/mpibash/package.py
index b328b98ecf..f3feaaaa42 100644
--- a/var/spack/repos/builtin/packages/mpibash/package.py
+++ b/var/spack/repos/builtin/packages/mpibash/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Mpibash(Package):
"""Parallel scripting right from the Bourne-Again Shell (Bash)"""
homepage = "http://www.ccs3.lanl.gov/~pakin/software/mpibash-4.3.html"
@@ -36,7 +36,7 @@ class Mpibash(Package):
patch('mpibash-4.3.patch', level=1, when='@4.3')
# above patch modifies configure.ac
- depends_on('autoconf')
+ depends_on('autoconf', type='build')
# uses MPI_Exscan which is in MPI-1.2 and later
depends_on('mpi@1.2:')
diff --git a/var/spack/repos/builtin/packages/mpich/package.py b/var/spack/repos/builtin/packages/mpich/package.py
index 164d9db541..dd864acbe3 100644
--- a/var/spack/repos/builtin/packages/mpich/package.py
+++ b/var/spack/repos/builtin/packages/mpich/package.py
@@ -23,15 +23,15 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
-class Mpich(Package):
+class Mpich(AutotoolsPackage):
"""MPICH is a high performance and widely portable implementation of
- the Message Passing Interface (MPI) standard."""
+ the Message Passing Interface (MPI) standard."""
+
homepage = "http://www.mpich.org"
- url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
- list_url = "http://www.mpich.org/static/downloads/"
+ url = "http://www.mpich.org/static/downloads/3.0.4/mpich-3.0.4.tar.gz"
+ list_url = "http://www.mpich.org/static/downloads/"
list_depth = 2
version('3.2', 'f414cfa77099cd1fa1a5ae4e22db508a')
@@ -42,14 +42,27 @@ class Mpich(Package):
version('3.1', '5643dd176499bfb7d25079aaff25f2ec')
version('3.0.4', '9c5d5d4fe1e17dd12153f40bc5b6dbc0')
+ variant('hydra', default=True, description='Build the hydra process manager')
+ variant('pmi', default=True, description='Build with PMI support')
+ variant('romio', default=True, description='Enable ROMIO MPI I/O implementation')
variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
- variant('pmi', default=True, description='Build with PMI support')
- variant('hydra', default=True, description='Build the hydra process manager')
provides('mpi@:3.0', when='@3:')
provides('mpi@:1.3', when='@1:')
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ # On Cray, the regular compiler wrappers *are* the MPI wrappers.
+ if 'platform=cray' in self.spec:
+ spack_env.set('MPICC', spack_cc)
+ spack_env.set('MPICXX', spack_cxx)
+ spack_env.set('MPIF77', spack_fc)
+ spack_env.set('MPIF90', spack_fc)
+ else:
+ spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
+ spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpic++'))
+ spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
+ spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
+
spack_env.set('MPICH_CC', spack_cc)
spack_env.set('MPICH_CXX', spack_cxx)
spack_env.set('MPICH_F77', spack_f77)
@@ -57,63 +70,69 @@ class Mpich(Package):
spack_env.set('MPICH_FC', spack_fc)
def setup_dependent_package(self, module, dep_spec):
- self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
- self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++')
- self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
- self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
-
- def install(self, spec, prefix):
- config_args = ["--prefix=" + prefix,
- "--with-pmi=" + ("yes" if '+pmi' in spec else 'no'),
- "--with-pm=" + ('hydra' if '+hydra' in spec else 'no'),
- "--enable-shared"]
-
- # Variants
- if '+verbs' in spec:
- config_args.append("--with-ibverbs")
+ if 'platform=cray' in self.spec:
+ self.spec.mpicc = spack_cc
+ self.spec.mpicxx = spack_cxx
+ self.spec.mpifc = spack_fc
+ self.spec.mpif77 = spack_f77
else:
- config_args.append("--without-ibverbs")
-
- # TODO: Spack should make it so that you can't actually find
- # these compilers if they're "disabled" for the current
- # compiler configuration.
- if not self.compiler.f77:
- config_args.append("--disable-f77")
-
- if not self.compiler.fc:
- config_args.append("--disable-fc")
-
- if not self.compiler.fc and not self.compiler.f77:
- config_args.append("--disable-fortran")
-
- configure(*config_args)
- make()
- make("install")
-
- self.filter_compilers()
-
-
+ self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
+ self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++')
+ self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
+ self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
+
+ self.spec.mpicxx_shared_libs = [
+ join_path(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)),
+ join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix))
+ ]
+
+ @AutotoolsPackage.precondition('autoreconf')
+ def die_without_fortran(self):
+ # Until we can pass variants such as +fortran through virtual
+ # dependencies like depends_on('mpi'), require a Fortran compiler
+ # to avoid delayed build errors in dependents.
+ if (self.compiler.f77 is None) or (self.compiler.fc is None):
+ raise InstallError(
+ 'Mpich requires both C and Fortran compilers!'
+ )
+
+ def configure_args(self):
+ spec = self.spec
+ return [
+ '--enable-shared',
+ '--with-pm={0}'.format('hydra' if '+hydra' in spec else 'no'),
+ '--with-pmi={0}'.format('yes' if '+pmi' in spec else 'no'),
+ '--{0}-romio'.format('enable' if '+romio' in spec else 'disable'),
+ '--{0}-ibverbs'.format('with' if '+verbs' in spec else 'without')
+ ]
+
+ @AutotoolsPackage.sanity_check('install')
def filter_compilers(self):
"""Run after install to make the MPI compilers use the
- compilers that Spack built the package with.
-
- If this isn't done, they'll have CC, CXX, F77, and FC set
- to Spack's generic cc, c++, f77, and f90. We want them to
- be bound to whatever compiler they were built with.
- """
- bin = self.prefix.bin
- mpicc = os.path.join(bin, 'mpicc')
- mpicxx = os.path.join(bin, 'mpicxx')
- mpif77 = os.path.join(bin, 'mpif77')
- mpif90 = os.path.join(bin, 'mpif90')
-
- spack_cc = os.environ['CC']
- spack_cxx = os.environ['CXX']
- spack_f77 = os.environ['F77']
- spack_fc = os.environ['FC']
-
- kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : True }
- filter_file('CC="%s"' % spack_cc , 'CC="%s"' % self.compiler.cc, mpicc, **kwargs)
- filter_file('CXX="%s"'% spack_cxx, 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs)
- filter_file('F77="%s"'% spack_f77, 'F77="%s"' % self.compiler.f77, mpif77, **kwargs)
- filter_file('FC="%s"' % spack_fc , 'FC="%s"' % self.compiler.fc, mpif90, **kwargs)
+ compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC, CXX, F77, and FC set
+ to Spack's generic cc, c++, f77, and f90. We want them to
+ be bound to whatever compiler they were built with."""
+
+ mpicc = join_path(self.prefix.bin, 'mpicc')
+ mpicxx = join_path(self.prefix.bin, 'mpicxx')
+ mpif77 = join_path(self.prefix.bin, 'mpif77')
+ mpif90 = join_path(self.prefix.bin, 'mpif90')
+
+ # Substitute Spack compile wrappers for the real
+ # underlying compiler
+ kwargs = {
+ 'ignore_absent': True,
+ 'backup': False,
+ 'string': True
+ }
+ filter_file(env['CC'], self.compiler.cc, mpicc, **kwargs)
+ filter_file(env['CXX'], self.compiler.cxx, mpicxx, **kwargs)
+ filter_file(env['F77'], self.compiler.f77, mpif77, **kwargs)
+ filter_file(env['FC'], self.compiler.fc, mpif90, **kwargs)
+
+ # Remove this linking flag if present
+ # (it turns RPATH into RUNPATH)
+ for wrapper in (mpicc, mpicxx, mpif77, mpif90):
+ filter_file('-Wl,--enable-new-dtags', '', wrapper, **kwargs)
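A hedged sketch of the configure_args pattern introduced above, with a plain dict of variant settings standing in for the spec; mpich_configure_args is a hypothetical helper and the flags mirror the diff:

def mpich_configure_args(variants):
    """Assemble MPICH configure flags from variant settings (sketch)."""
    return [
        '--enable-shared',
        '--with-pm={0}'.format('hydra' if variants.get('hydra') else 'no'),
        '--with-pmi={0}'.format('yes' if variants.get('pmi') else 'no'),
        '--{0}-romio'.format('enable' if variants.get('romio') else 'disable'),
        '--{0}-ibverbs'.format('with' if variants.get('verbs') else 'without'),
    ]


print(mpich_configure_args({'hydra': True, 'pmi': True, 'romio': True}))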
diff --git a/var/spack/repos/builtin/packages/mpileaks/package.py b/var/spack/repos/builtin/packages/mpileaks/package.py
index 51bc66a0eb..ec4e9b30cc 100644
--- a/var/spack/repos/builtin/packages/mpileaks/package.py
+++ b/var/spack/repos/builtin/packages/mpileaks/package.py
@@ -24,8 +24,10 @@
##############################################################################
from spack import *
+
class Mpileaks(Package):
- """Tool to detect and report leaked MPI objects like MPI_Requests and MPI_Datatypes."""
+ """Tool to detect and report leaked MPI objects like MPI_Requests and
+ MPI_Datatypes."""
homepage = "https://github.com/hpc/mpileaks"
url = "https://github.com/hpc/mpileaks/releases/download/v1.0/mpileaks-1.0.tar.gz"
diff --git a/var/spack/repos/builtin/packages/mpip/package.py b/var/spack/repos/builtin/packages/mpip/package.py
new file mode 100644
index 0000000000..78e1dca68a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mpip/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Mpip(Package):
+ """mpiP: Lightweight, Scalable MPI Profiling"""
+ homepage = "http://mpip.sourceforge.net/"
+ url = "http://downloads.sourceforge.net/project/mpip/mpiP/mpiP-3.4.1/mpiP-3.4.1.tar.gz"
+
+ version("3.4.1", "1168adc83777ac31d6ebd385823aabbd")
+
+ depends_on("libelf", type="build")
+ depends_on("libdwarf", type="build")
+ depends_on('libunwind', when=os.uname()[4] == "x86_64", type="build")
+ depends_on("mpi", type="build")
+
+ def install(self, spec, prefix):
+ configure("--prefix=" + prefix, "--without-f77")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/mpir/package.py b/var/spack/repos/builtin/packages/mpir/package.py
new file mode 100644
index 0000000000..b939a690b2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mpir/package.py
@@ -0,0 +1,62 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mpir(Package):
+ """Multiple Precision Integers and Rationals."""
+
+ homepage = "https://github.com/wbhart/mpir"
+ url = "https://github.com/wbhart/mpir/archive/mpir-2.7.0.tar.gz"
+
+ version('2.7.0', '985b5d57bd0e74c74125ee885b9c8f71')
+ version('2.6.0', 'ec17d6a7e026114ceb734b2466aa0a91')
+ version('develop', git='https://github.com/wbhart/mpir.git')
+
+ # This setting allows mpir to act as a drop-in replacement for gmp
+ variant('gmp_compat', default=False,
+ description='Compile with GMP library compatibility')
+
+ # Build dependencies
+ depends_on('autoconf', type='build')
+
+ # Other dependencies
+ depends_on('yasm')
+
+ def install(self, spec, prefix):
+ # Don't let MPIR build its own bundled copy of YASM: that build
+ # tries to install YASM into a system directory, so use the
+ # external yasm dependency instead.
+ options = ['--prefix={0}'.format(prefix),
+ '--with-system-yasm']
+
+ if '+gmp_compat' in spec:
+ options.extend(['--enable-gmpcompat'])
+
+ configure(*options)
+ make()
+ if self.run_tests:
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/mrnet/package.py b/var/spack/repos/builtin/packages/mrnet/package.py
index 3380c7f823..9da9e29a2e 100644
--- a/var/spack/repos/builtin/packages/mrnet/package.py
+++ b/var/spack/repos/builtin/packages/mrnet/package.py
@@ -24,34 +24,33 @@
##############################################################################
from spack import *
+
class Mrnet(Package):
"""The MRNet Multi-Cast Reduction Network."""
homepage = "http://paradyn.org/mrnet"
url = "ftp://ftp.cs.wisc.edu/paradyn/mrnet/mrnet_5.0.1.tar.gz"
list_url = "http://ftp.cs.wisc.edu/paradyn/mrnet"
- version('5.0.1-2', git='https://github.com/dyninst/mrnet.git', commit='20b1eacfc6d680d9f6472146d2dfaa0f900cc2e9')
+ version('5.0.1-2', git='https://github.com/dyninst/mrnet.git',
+ commit='20b1eacfc6d680d9f6472146d2dfaa0f900cc2e9')
version('5.0.1', '17f65738cf1b9f9b95647ff85f69ecdd')
version('4.1.0', '5a248298b395b329e2371bf25366115c')
version('4.0.0', 'd00301c078cba57ef68613be32ceea2f')
- # Add a patch that brings mrnet-5.0.1 up to date with the current development tree
- # The development tree contains fixes needed for the krell based tools
- variant('krellpatch', default=False, description="Build MRNet with krell openspeedshop based patch.")
- patch('krell-5.0.1.patch', when='@5.0.1+krellpatch')
-
- variant('lwthreads', default=False, description="Also build the MRNet LW threadsafe libraries")
+ variant('lwthreads', default=False,
+ description="Also build the MRNet LW threadsafe libraries")
parallel = False
depends_on("boost")
def install(self, spec, prefix):
- # Build the MRNet LW thread safe libraries when the krelloptions variant is present
+ # Build the MRNet LW thread safe libraries when the
+ # lwthreads variant is present
if '+lwthreads' in spec:
- configure("--prefix=%s" %prefix, "--enable-shared", "--enable-ltwt-threadsafe")
+ configure("--prefix=%s" % prefix, "--enable-shared",
+ "--enable-ltwt-threadsafe")
else:
- configure("--prefix=%s" %prefix, "--enable-shared")
+ configure("--prefix=%s" % prefix, "--enable-shared")
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/msgpack-c/package.py b/var/spack/repos/builtin/packages/msgpack-c/package.py
index 925dceabed..9a726e2356 100644
--- a/var/spack/repos/builtin/packages/msgpack-c/package.py
+++ b/var/spack/repos/builtin/packages/msgpack-c/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class MsgpackC(Package):
"""A small, fast binary interchange format convertible to/from JSON"""
homepage = "http://www.msgpack.org"
@@ -31,6 +32,8 @@ class MsgpackC(Package):
version('1.4.1', 'e2fd3a7419b9bc49e5017fdbefab87e0')
+ depends_on('cmake', type='build')
+
def install(self, spec, prefix):
cmake('.', *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/mumps/package.py b/var/spack/repos/builtin/packages/mumps/package.py
index 92c45c9b95..82d0c606b6 100644
--- a/var/spack/repos/builtin/packages/mumps/package.py
+++ b/var/spack/repos/builtin/packages/mumps/package.py
@@ -23,7 +23,10 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os, sys, glob
+import os
+import sys
+import glob
+
class Mumps(Package):
"""MUMPS: a MUltifrontal Massively Parallel sparse direct Solver"""
@@ -31,20 +34,31 @@ class Mumps(Package):
homepage = "http://mumps.enseeiht.fr"
url = "http://mumps.enseeiht.fr/MUMPS_5.0.1.tar.gz"
+ version('5.0.2', '591bcb2c205dcb0283872608cdf04927')
+ # Alternate location if main server is down.
+ # version('5.0.1', 'b477573fdcc87babe861f62316833db0', url='http://pkgs.fedoraproject.org/repo/pkgs/MUMPS/MUMPS_5.0.1.tar.gz/md5/b477573fdcc87babe861f62316833db0/MUMPS_5.0.1.tar.gz')
version('5.0.1', 'b477573fdcc87babe861f62316833db0')
- variant('mpi', default=True, description='Activate the compilation of MUMPS with the MPI support')
- variant('scotch', default=False, description='Activate Scotch as a possible ordering library')
- variant('ptscotch', default=False, description='Activate PT-Scotch as a possible ordering library')
- variant('metis', default=False, description='Activate Metis as a possible ordering library')
- variant('parmetis', default=False, description='Activate Parmetis as a possible ordering library')
- variant('double', default=True, description='Activate the compilation of dmumps')
- variant('float', default=True, description='Activate the compilation of smumps')
- variant('complex', default=True, description='Activate the compilation of cmumps and/or zmumps')
- variant('idx64', default=False, description='Use int64_t/integer*8 as default index type')
+ variant('mpi', default=True,
+ description='Compile MUMPS with MPI support')
+ variant('scotch', default=False,
+ description='Activate Scotch as a possible ordering library')
+ variant('ptscotch', default=False,
+ description='Activate PT-Scotch as a possible ordering library')
+ variant('metis', default=False,
+ description='Activate Metis as a possible ordering library')
+ variant('parmetis', default=False,
+ description='Activate Parmetis as a possible ordering library')
+ variant('double', default=True,
+ description='Activate the compilation of dmumps')
+ variant('float', default=True,
+ description='Activate the compilation of smumps')
+ variant('complex', default=True,
+ description='Activate the compilation of cmumps and/or zmumps')
+ variant('int64', default=False,
+ description='Use int64_t/integer*8 as default index type')
variant('shared', default=True, description='Build shared libraries')
-
depends_on('scotch + esmumps', when='~ptscotch+scotch')
depends_on('scotch + esmumps + mpi', when='+ptscotch')
depends_on('metis@5:', when='+metis')
@@ -60,51 +74,63 @@ class Mumps(Package):
# end before install
# def patch(self):
def write_makefile_inc(self):
- if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and '+mpi' not in self.spec:
- raise RuntimeError('You cannot use the variants parmetis or ptscotch without mpi')
+ if ('+parmetis' in self.spec or '+ptscotch' in self.spec) and (
+ '+mpi' not in self.spec):
+ raise RuntimeError(
+ 'You cannot use the variants parmetis or ptscotch without mpi')
- makefile_conf = ["LIBBLAS = -L%s -lblas" % self.spec['blas'].prefix.lib]
+ lapack_blas = (self.spec['lapack'].lapack_libs +
+ self.spec['blas'].blas_libs)
+ makefile_conf = ["LIBBLAS = %s" % lapack_blas.joined()]
orderings = ['-Dpord']
if '+ptscotch' in self.spec or '+scotch' in self.spec:
join_lib = ' -l%s' % ('pt' if '+ptscotch' in self.spec else '')
- makefile_conf.extend(
- ["ISCOTCH = -I%s" % self.spec['scotch'].prefix.include,
- "LSCOTCH = -L%s %s%s" % (self.spec['scotch'].prefix.lib,
- join_lib,
- join_lib.join(['esmumps', 'scotch', 'scotcherr']))])
+ makefile_conf.extend([
+ "ISCOTCH = -I%s" % self.spec['scotch'].prefix.include,
+ "LSCOTCH = -L%s %s%s" % (self.spec['scotch'].prefix.lib,
+ join_lib,
+ join_lib.join(['esmumps',
+ 'scotch',
+ 'scotcherr']))
+ ])
+
orderings.append('-Dscotch')
if '+ptscotch' in self.spec:
orderings.append('-Dptscotch')
if '+parmetis' in self.spec and '+metis' in self.spec:
- libname = 'parmetis' if '+parmetis' in self.spec else 'metis'
- makefile_conf.extend(
- ["IMETIS = -I%s" % self.spec['parmetis'].prefix.include,
- "LMETIS = -L%s -l%s -L%s -l%s" % (self.spec['parmetis'].prefix.lib, 'parmetis',self.spec['metis'].prefix.lib, 'metis')])
+ makefile_conf.extend([
+ "IMETIS = -I%s" % self.spec['parmetis'].prefix.include,
+ "LMETIS = -L%s -l%s -L%s -l%s" % (
+ self.spec['parmetis'].prefix.lib, 'parmetis',
+ self.spec['metis'].prefix.lib, 'metis')
+ ])
orderings.append('-Dparmetis')
elif '+metis' in self.spec:
- makefile_conf.extend(
- ["IMETIS = -I%s" % self.spec['metis'].prefix.include,
- "LMETIS = -L%s -l%s" % (self.spec['metis'].prefix.lib, 'metis')])
+ makefile_conf.extend([
+ "IMETIS = -I%s" % self.spec['metis'].prefix.include,
+ "LMETIS = -L%s -l%s" % (self.spec['metis'].prefix.lib, 'metis')
+ ])
orderings.append('-Dmetis')
makefile_conf.append("ORDERINGSF = %s" % (' '.join(orderings)))
# when building shared libs need -fPIC, otherwise
- # /usr/bin/ld: graph.o: relocation R_X86_64_32 against `.rodata.str1.1' can not be used when making a shared object; recompile with -fPIC
+ # /usr/bin/ld: graph.o: relocation R_X86_64_32 against `.rodata.str1.1'
+ # can not be used when making a shared object; recompile with -fPIC
fpic = '-fPIC' if '+shared' in self.spec else ''
# TODO: test this part, it needs a full blas, scalapack and
# partitioning environment with 64-bit integers
- if '+idx64' in self.spec:
+ if '+int64' in self.spec:
makefile_conf.extend(
# The Fortran compilation flags most probably work only for Intel
# and GNU compilers; this is perhaps something the compiler
# should provide
- ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic,'-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'),
+ ['OPTF = %s -O -DALLOW_NON_INIT %s' % (fpic, '-fdefault-integer-8' if self.compiler.name == "gcc" else '-i8'), # noqa
'OPTL = %s -O ' % fpic,
'OPTC = %s -O -DINTSIZE64' % fpic])
else:
@@ -113,13 +139,13 @@ class Mumps(Package):
'OPTL = %s -O ' % fpic,
'OPTC = %s -O ' % fpic])
-
if '+mpi' in self.spec:
+ scalapack = self.spec['scalapack'].scalapack_libs
makefile_conf.extend(
["CC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpicc'),
"FC = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
"FL = %s" % join_path(self.spec['mpi'].prefix.bin, 'mpif90'),
- "SCALAP = %s" % self.spec['scalapack'].fc_link,
+ "SCALAP = %s" % scalapack.ld_flags,
"MUMPS_TYPE = par"])
else:
makefile_conf.extend(
@@ -130,20 +156,27 @@ class Mumps(Package):
# TODO: change the value to the correct one according to the
# compiler possible values are -DAdd_, -DAdd__ and/or -DUPPER
- makefile_conf.append("CDEFS = -DAdd_")
+ if self.compiler.name == 'intel':
+ # Intel Fortran compiler provides the main() function so
+ # C examples linked with the Fortran compiler require a
+ # hack defined by _DMAIN_COMP (see examples/c_example.c)
+ makefile_conf.append("CDEFS = -DAdd_ -DMAIN_COMP")
+ else:
+ makefile_conf.append("CDEFS = -DAdd_")
if '+shared' in self.spec:
if sys.platform == 'darwin':
- # Building dylibs with mpif90 causes segfaults on 10.8 and 10.10. Use gfortran. (Homebrew)
+ # Building dylibs with mpif90 causes segfaults on 10.8 and
+ # 10.10. Use gfortran. (Homebrew)
makefile_conf.extend([
'LIBEXT=.dylib',
- 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'],prefix.lib),
+ 'AR=%s -dynamiclib -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % (os.environ['FC'], prefix.lib), # noqa
'RANLIB=echo'
])
else:
makefile_conf.extend([
'LIBEXT=.so',
- 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib,
+ 'AR=$(FL) -shared -Wl,-soname -Wl,%s/$(notdir $@) -o' % prefix.lib, # noqa
'RANLIB=echo'
])
else:
@@ -153,9 +186,8 @@ class Mumps(Package):
'RANLIB = ranlib'
])
-
- makefile_inc_template = join_path(os.path.dirname(self.module.__file__),
- 'Makefile.inc')
+ makefile_inc_template = join_path(
+ os.path.dirname(self.module.__file__), 'Makefile.inc')
with open(makefile_inc_template, "r") as fh:
makefile_conf.extend(fh.read().split('\n'))
@@ -164,8 +196,6 @@ class Mumps(Package):
makefile_inc = '\n'.join(makefile_conf)
fh.write(makefile_inc)
-
-
def install(self, spec, prefix):
make_libs = []
@@ -189,15 +219,15 @@ class Mumps(Package):
install_tree('lib', prefix.lib)
install_tree('include', prefix.include)
- if '~mpi' in spec:
+ if '~mpi' in spec:
lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so'
lib_suffix = lib_dsuffix if '+shared' in spec else '.a'
install('libseq/libmpiseq%s' % lib_suffix, prefix.lib)
- for f in glob.glob(join_path('libseq','*.h')):
+ for f in glob.glob(join_path('libseq', '*.h')):
install(f, prefix.include)
- # FIXME: extend the tests to mpirun -np 2 (or alike) when build with MPI
- # FIXME: use something like numdiff to compare blessed output with the current
+ # FIXME: extend the tests to mpirun -np 2 when built with MPI
+ # FIXME: use something like numdiff to compare output files
with working_dir('examples'):
if '+float' in spec:
os.system('./ssimpletest < input_simpletest_real')
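A short sketch of how the ordering flags in write_makefile_inc above accumulate from variants; mumps_orderings and the variants set are illustrative assumptions:

def mumps_orderings(variants):
    """Collect -D ordering flags for the ORDERINGSF make variable (sketch)."""
    orderings = ['-Dpord']          # PORD is always available
    if 'scotch' in variants or 'ptscotch' in variants:
        orderings.append('-Dscotch')
        if 'ptscotch' in variants:
            orderings.append('-Dptscotch')
    if 'parmetis' in variants and 'metis' in variants:
        orderings.append('-Dparmetis')
    elif 'metis' in variants:
        orderings.append('-Dmetis')
    return 'ORDERINGSF = %s' % ' '.join(orderings)


print(mumps_orderings({'metis', 'scotch'}))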
diff --git a/var/spack/repos/builtin/packages/munge/package.py b/var/spack/repos/builtin/packages/munge/package.py
index ebe3e18882..38dbfa1cc1 100644
--- a/var/spack/repos/builtin/packages/munge/package.py
+++ b/var/spack/repos/builtin/packages/munge/package.py
@@ -25,20 +25,18 @@
from spack import *
import os
-class Munge(Package):
+
+class Munge(AutotoolsPackage):
""" MUNGE Uid 'N' Gid Emporium """
homepage = "https://code.google.com/p/munge/"
url = "https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2"
- version('0.5.11', 'bd8fca8d5f4c1fcbef1816482d49ee01', url='https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2')
+ version('0.5.11', 'bd8fca8d5f4c1fcbef1816482d49ee01',
+ url='https://github.com/dun/munge/releases/download/munge-0.5.11/munge-0.5.11.tar.bz2')
depends_on('openssl')
depends_on('libgcrypt')
def install(self, spec, prefix):
os.makedirs(os.path.join(prefix, "lib/systemd/system"))
- configure("--prefix=%s" % prefix)
-
- make()
- make("install")
-
+ super(Munge, self).install(spec, prefix)
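A toy sketch of the install-override pattern used above, where extra setup runs before delegating to the inherited build steps; BuildSystem here is a hypothetical stand-in for AutotoolsPackage:

class BuildSystem(object):
    """Hypothetical stand-in for AutotoolsPackage's stock install()."""
    def install(self, spec, prefix):
        print('configure --prefix=%s && make && make install' % prefix)


class Munge(BuildSystem):
    def install(self, spec, prefix):
        # Create the systemd unit directory first, then fall back to the
        # inherited configure/make/install sequence, as in the diff above.
        print('mkdir -p %s/lib/systemd/system' % prefix)
        super(Munge, self).install(spec, prefix)


Munge().install(spec=None, prefix='/opt/munge')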
diff --git a/var/spack/repos/builtin/packages/muparser/package.py b/var/spack/repos/builtin/packages/muparser/package.py
index 47d1855329..1373c8cd7b 100644
--- a/var/spack/repos/builtin/packages/muparser/package.py
+++ b/var/spack/repos/builtin/packages/muparser/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Muparser(Package):
"""C++ math expression parser library."""
homepage = "http://muparser.beltoforion.de/"
diff --git a/var/spack/repos/builtin/packages/muster/package.py b/var/spack/repos/builtin/packages/muster/package.py
index 993f147245..81817e48dc 100644
--- a/var/spack/repos/builtin/packages/muster/package.py
+++ b/var/spack/repos/builtin/packages/muster/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Muster(Package):
"""The Muster library provides implementations of sequential and
parallel K-Medoids clustering algorithms. It is intended as a
@@ -39,6 +40,7 @@ class Muster(Package):
depends_on("boost")
depends_on("mpi")
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
cmake(".", *std_cmake_args)
diff --git a/var/spack/repos/builtin/packages/mvapich2/package.py b/var/spack/repos/builtin/packages/mvapich2/package.py
index f4997bdfa1..18f2ebe0bb 100644
--- a/var/spack/repos/builtin/packages/mvapich2/package.py
+++ b/var/spack/repos/builtin/packages/mvapich2/package.py
@@ -23,41 +23,48 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+
class Mvapich2(Package):
"""MVAPICH2 is an MPI implementation for Infiniband networks."""
homepage = "http://mvapich.cse.ohio-state.edu/"
- url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2b.tar.gz"
+ url = "http://mvapich.cse.ohio-state.edu/download/mvapich/mv2/mvapich2-2.2.tar.gz"
- version('2.2b', '5651e8b7a72d7c77ca68da48f3a5d108')
- version('2.2a', 'b8ceb4fc5f5a97add9b3ff1b9cbe39d2')
- version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
- version('1.9', '5dc58ed08fd3142c260b70fe297e127c')
+ version('2.2', '939b65ebe5b89a5bc822cdab0f31f96e')
+ version('2.1', '0095ceecb19bbb7fb262131cb9c2cdd6')
+ version('2.0', '9fbb68a4111a8b6338e476dc657388b4')
+ version('1.9', '5dc58ed08fd3142c260b70fe297e127c')
patch('ad_lustre_rwcontig_open_source.patch', when='@1.9')
provides('mpi@:2.2', when='@1.9') # MVAPICH2-1.9 supports MPI 2.2
provides('mpi@:3.0', when='@2.0:') # MVAPICH2-2.0 supports MPI 3.0
- variant('debug', default=False, description='Enables debug information and error messages at run-time')
+ variant('debug', default=False,
+ description='Enable debug info and error messages at run-time')
##########
- # TODO : Process managers should be grouped into the same variant, as soon as variant capabilities will be extended
- # See https://groups.google.com/forum/#!topic/spack/F8-f8B4_0so
+    # TODO : Process managers should be grouped into the same variant
+    # as soon as variant capabilities are extended. See
+    # https://groups.google.com/forum/#!topic/spack/F8-f8B4_0so
SLURM = 'slurm'
HYDRA = 'hydra'
GFORKER = 'gforker'
REMSHELL = 'remshell'
SLURM_INCOMPATIBLE_PMS = (HYDRA, GFORKER, REMSHELL)
- variant(SLURM, default=False, description='Sets slurm as the only process manager')
- variant(HYDRA, default=False, description='Sets hydra as one of the process managers')
- variant(GFORKER, default=False, description='Sets gforker as one of the process managers')
- variant(REMSHELL, default=False, description='Sets remshell as one of the process managers')
+ variant(SLURM, default=False,
+ description='Set slurm as the only process manager')
+ variant(HYDRA, default=False,
+ description='Set hydra as one of the process managers')
+ variant(GFORKER, default=False,
+ description='Set gforker as one of the process managers')
+ variant(REMSHELL, default=False,
+ description='Set remshell as one of the process managers')
##########
##########
- # TODO : Network types should be grouped into the same variant, as soon as variant capabilities will be extended
+    # TODO : Network types should be grouped into the same variant as
+    # soon as variant capabilities are extended.
PSM = 'psm'
SOCK = 'sock'
NEMESISIBTCP = 'nemesisibtcp'
@@ -65,15 +72,29 @@ class Mvapich2(Package):
NEMESIS = 'nemesis'
MRAIL = 'mrail'
SUPPORTED_NETWORKS = (PSM, SOCK, NEMESIS, NEMESISIB, NEMESISIBTCP)
- variant(PSM, default=False, description='Configures a build for QLogic PSM-CH3')
- variant(SOCK, default=False, description='Configures a build for TCP/IP-CH3')
- variant(NEMESISIBTCP, default=False, description='Configures a build for both OFA-IB-Nemesis and TCP/IP-Nemesis')
- variant(NEMESISIB, default=False, description='Configures a build for OFA-IB-Nemesis')
- variant(NEMESIS, default=False, description='Configures a build for TCP/IP-Nemesis')
- variant(MRAIL, default=False, description='Configures a build for OFA-IB-CH3')
+ variant(
+ PSM, default=False,
+ description='Configure for QLogic PSM-CH3')
+ variant(
+ SOCK, default=False,
+ description='Configure for TCP/IP-CH3')
+ variant(
+ NEMESISIBTCP, default=False,
+ description='Configure for both OFA-IB-Nemesis and TCP/IP-Nemesis')
+ variant(
+ NEMESISIB, default=False,
+ description='Configure for OFA-IB-Nemesis')
+ variant(
+ NEMESIS, default=False,
+ description='Configure for TCP/IP-Nemesis')
+ variant(
+ MRAIL, default=False,
+ description='Configure for OFA-IB-CH3')
##########
# FIXME : CUDA support is missing
+ depends_on('bison')
+ depends_on('libpciaccess')
def url_for_version(self, version):
base_url = "http://mvapich.cse.ohio-state.edu/download"
@@ -84,8 +105,8 @@ class Mvapich2(Package):
@staticmethod
def enabled(x):
- """
- Given a variant name returns the string that means the variant is enabled
+        """Given a variant name, returns the string that means the
+        variant is enabled
:param x: variant name
:return:
@@ -93,8 +114,8 @@ class Mvapich2(Package):
return '+' + x
def set_build_type(self, spec, configure_args):
- """
- Appends to configure_args the flags that depends only on the build type (i.e. release or debug)
+ """Appends to configure_args the flags that depends only on the build
+ type (i.e. release or debug)
:param spec: spec
:param configure_args: list of current configure arguments
@@ -104,7 +125,8 @@ class Mvapich2(Package):
"--disable-fast",
"--enable-error-checking=runtime",
"--enable-error-messages=all",
- "--enable-g=dbg", "--enable-debuginfo" # Permits debugging with TotalView
+ # Permits debugging with TotalView
+ "--enable-g=dbg", "--enable-debuginfo"
]
else:
build_type_options = ["--enable-fast=all"]
@@ -112,25 +134,41 @@ class Mvapich2(Package):
configure_args.extend(build_type_options)
def set_process_manager(self, spec, configure_args):
- """
- Appends to configure_args the flags that will enable the appropriate process managers
+ """Appends to configure_args the flags that will enable the
+ appropriate process managers
:param spec: spec
:param configure_args: list of current configure arguments
"""
- # Check that slurm variant is not activated together with other pm variants
- has_slurm_incompatible_variants = any(self.enabled(x) in spec for x in Mvapich2.SLURM_INCOMPATIBLE_PMS)
- if self.enabled(Mvapich2.SLURM) in spec and has_slurm_incompatible_variants:
- raise RuntimeError(" %s : 'slurm' cannot be activated together with other process managers" % self.name)
+ # Check that slurm variant is not activated together with
+ # other pm variants
+ has_slurm_incompatible_variants = \
+ any(self.enabled(x) in spec
+ for x in Mvapich2.SLURM_INCOMPATIBLE_PMS)
+
+ if self.enabled(Mvapich2.SLURM) in spec and \
+ has_slurm_incompatible_variants:
+            raise RuntimeError("%s: 'slurm' cannot be activated together "
+                               "with other process managers" % self.name)
process_manager_options = []
+ # See: http://slurm.schedmd.com/mpi_guide.html#mvapich2
if self.enabled(Mvapich2.SLURM) in spec:
- process_manager_options = [
- "--with-pm=slurm"
- ]
+ if self.version > Version('2.0'):
+ process_manager_options = [
+ "--with-pmi=pmi2",
+ "--with-pm=slurm"
+ ]
+ else:
+ process_manager_options = [
+ "--with-pmi=slurm",
+ "--with-pm=no"
+ ]
+
elif has_slurm_incompatible_variants:
pms = []
- # The variant name is equal to the process manager name in the configuration options
+ # The variant name is equal to the process manager name in
+ # the configuration options
for x in Mvapich2.SLURM_INCOMPATIBLE_PMS:
if self.enabled(x) in spec:
pms.append(x)
@@ -146,7 +184,9 @@ class Mvapich2(Package):
if self.enabled(x) in spec:
count += 1
if count > 1:
- raise RuntimeError('network variants are mutually exclusive (only one can be selected at a time)')
+            raise RuntimeError('network variants are mutually exclusive '
+                               '(only one can be selected at a time)')
+
network_options = []
# From here on I can suppose that only one variant has been selected
if self.enabled(Mvapich2.PSM) in spec:
@@ -164,7 +204,17 @@ class Mvapich2(Package):
configure_args.extend(network_options)
+ def setup_environment(self, spack_env, run_env):
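+        # When built with SLURM PMI-2 support (see configure args), tell
+        # srun to use the pmi2 plugin at run time.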
+ if self.enabled(Mvapich2.SLURM) in self.spec and \
+ self.version > Version('2.0'):
+ run_env.set('SLURM_MPI_TYPE', 'pmi2')
+
def setup_dependent_environment(self, spack_env, run_env, extension_spec):
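+        # Point dependents at this MVAPICH2's MPI compiler wrappers.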
+ spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
+ spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpicxx'))
+ spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
+ spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
+
spack_env.set('MPICH_CC', spack_cc)
spack_env.set('MPICH_CXX', spack_cxx)
spack_env.set('MPICH_F77', spack_f77)
@@ -176,9 +226,21 @@ class Mvapich2(Package):
self.spec.mpicxx = join_path(self.prefix.bin, 'mpicxx')
self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
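+        # Shared MPI libraries, for dependents that need to link them.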
+ self.spec.mpicxx_shared_libs = [
+ join_path(self.prefix.lib, 'libmpicxx.{0}'.format(dso_suffix)),
+ join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix))
+ ]
def install(self, spec, prefix):
- # we'll set different configure flags depending on our environment
+        # Until we can pass variants such as +fortran through virtual
+        # dependencies (e.g. depends_on('mpi')), require a Fortran
+        # compiler to avoid delayed build errors in dependents.
+ if (self.compiler.f77 is None) or (self.compiler.fc is None):
+            raise InstallError('Mvapich2 requires both C and Fortran '
+                               'compilers!')
+
+ # we'll set different configure flags depending on our
+ # environment
configure_args = [
"--prefix=%s" % prefix,
"--enable-shared",
@@ -208,7 +270,6 @@ class Mvapich2(Package):
self.filter_compilers()
-
def filter_compilers(self):
"""Run after install to make the MPI compilers use the
compilers that Spack built the package with.
@@ -218,18 +279,20 @@ class Mvapich2(Package):
be bound to whatever compiler they were built with.
"""
bin = self.prefix.bin
- mpicc = os.path.join(bin, 'mpicc')
- mpicxx = os.path.join(bin, 'mpicxx')
- mpif77 = os.path.join(bin, 'mpif77')
- mpif90 = os.path.join(bin, 'mpif90')
-
- spack_cc = os.environ['CC']
- spack_cxx = os.environ['CXX']
- spack_f77 = os.environ['F77']
- spack_fc = os.environ['FC']
-
- kwargs = { 'ignore_absent' : True, 'backup' : False, 'string' : True }
- filter_file('CC="%s"' % spack_cc , 'CC="%s"' % self.compiler.cc, mpicc, **kwargs)
- filter_file('CXX="%s"'% spack_cxx, 'CXX="%s"' % self.compiler.cxx, mpicxx, **kwargs)
- filter_file('F77="%s"'% spack_f77, 'F77="%s"' % self.compiler.f77, mpif77, **kwargs)
- filter_file('FC="%s"' % spack_fc , 'FC="%s"' % self.compiler.fc, mpif90, **kwargs)
+ mpicc = join_path(bin, 'mpicc')
+ mpicxx = join_path(bin, 'mpicxx')
+ mpif77 = join_path(bin, 'mpif77')
+ mpif90 = join_path(bin, 'mpif90')
+
+ # Substitute Spack compile wrappers for the real
+ # underlying compiler
+ kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
+ filter_file(env['CC'], self.compiler.cc, mpicc, **kwargs)
+ filter_file(env['CXX'], self.compiler.cxx, mpicxx, **kwargs)
+ filter_file(env['F77'], self.compiler.f77, mpif77, **kwargs)
+ filter_file(env['FC'], self.compiler.fc, mpif90, **kwargs)
+
+ # Remove this linking flag if present
+ # (it turns RPATH into RUNPATH)
+ for wrapper in (mpicc, mpicxx, mpif77, mpif90):
+ filter_file('-Wl,--enable-new-dtags', '', wrapper, **kwargs)
diff --git a/var/spack/repos/builtin/packages/mxml/package.py b/var/spack/repos/builtin/packages/mxml/package.py
new file mode 100644
index 0000000000..29e3b27d6e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/mxml/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Mxml(Package):
+ """Mini-XML is a small XML library that you can use to read and write XML
+ and XML-like data files in your application without requiring large
+ non-standard libraries.
+ """
+
+ homepage = "http://www.msweet.org"
+ url = "http://www.msweet.org/files/project3/mxml-2.9.tar.gz"
+
+ version('2.9', 'e21cad0f7aacd18f942aa0568a8dee19')
+ version('2.8', 'd85ee6d30de053581242c4a86e79a5d2')
+ version('2.7', '76f2ae49bf0f5745d5cb5d9507774dc9')
+ version('2.6', '68977789ae64985dddbd1a1a1652642e')
+ version('2.5', 'f706377fba630b39fa02fd63642b17e5')
+
+ # module swap PrgEnv-intel PrgEnv-$COMP
+ # (Can use whatever compiler you want to use)
+ # Case statement to change CC and CXX flags
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix, "--disable-shared", 'CFLAGS=-static')
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/nag/package.py b/var/spack/repos/builtin/packages/nag/package.py
index 63269a50b1..66cb2a6a54 100644
--- a/var/spack/repos/builtin/packages/nag/package.py
+++ b/var/spack/repos/builtin/packages/nag/package.py
@@ -30,7 +30,7 @@ class Nag(Package):
"""The NAG Fortran Compiler."""
homepage = "http://www.nag.com/nagware/np.asp"
- version('6.1', '1e29d9d435b7ccc2842a320150b28ba4')
+ version('6.1', 'f49bd548e0d5e2458b2dabb3ee01341a')
version('6.0', '3fa1e7f7b51ef8a23e6c687cdcad9f96')
# Licensing
@@ -43,8 +43,8 @@ class Nag(Package):
def url_for_version(self, version):
# TODO: url and checksum are architecture dependent
# TODO: We currently only support x86_64
- return 'http://www.nag.com/downloads/impl/npl6a%sna_amd64.tgz' % \
- str(version).replace('.', '')
+ url = 'http://www.nag.com/downloads/impl/npl6a{0}na_amd64.tgz'
+ return url.format(version.joined)
def install(self, spec, prefix):
# Set installation directories
diff --git a/var/spack/repos/builtin/packages/nano/package.py b/var/spack/repos/builtin/packages/nano/package.py
new file mode 100644
index 0000000000..3e87ec8ffe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nano/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Nano(Package):
+ """Tiny little text editor"""
+
+ homepage = "http://www.nano-editor.org"
+ url = "https://www.nano-editor.org/dist/v2.6/nano-2.6.3.tar.gz"
+
+ version('2.6.3', '1213c7f17916e65afefc95054c1f90f9')
+ version('2.6.2', '58568a4b8a33841d774c25f285fc11c1')
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix)
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/nasm/package.py b/var/spack/repos/builtin/packages/nasm/package.py
index c955e6d13e..9faccccaae 100644
--- a/var/spack/repos/builtin/packages/nasm/package.py
+++ b/var/spack/repos/builtin/packages/nasm/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Nasm(Package):
"""NASM (Netwide Assembler) is an 80x86 assembler designed for
portability and modularity. It includes a disassembler as well."""
diff --git a/var/spack/repos/builtin/packages/nauty/package.py b/var/spack/repos/builtin/packages/nauty/package.py
new file mode 100644
index 0000000000..0d5eed251b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nauty/package.py
@@ -0,0 +1,89 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import shutil
+from spack import *
+
+
+class Nauty(Package):
+ """nauty and Traces are programs for computing automorphism groups of
+    graphs and digraphs"""
+ homepage = "http://pallini.di.uniroma1.it/index.html"
+ url = "http://pallini.di.uniroma1.it/nauty26r7.tar.gz"
+
+ version('2.6r7', 'b2b18e03ea7698db3fbe06c5d76ad8fe')
+ version('2.6r5', '91b03a7b069962e94fc9aac8831ce8d2')
+ version('2.5r9', 'e8ecd08b0892a1fb13329c147f08de6d')
+
+ def url_for_version(self, version):
+ url = "http://pallini.di.uniroma1.it/nauty{0}.tar.gz"
+ return url.format(version.joined)
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+
+ exes = [
+ "NRswitchg",
+ "addedgeg",
+ "amtog",
+ "biplabg",
+ "catg",
+ "complg",
+ "converseg",
+ "copyg",
+ "countg",
+ "cubhamg",
+ "deledgeg",
+ "delptg",
+ "directg",
+ "dreadnaut",
+ "dretodot",
+ "dretog",
+ "genbg",
+ "genbgL",
+ "geng",
+ "genquarticg",
+ "genrang",
+ "genspecialg",
+ "gentourng",
+ "gentreeg",
+ "hamheuristic",
+ "labelg",
+ "linegraphg",
+ "listg",
+ "multig",
+ "newedgeg",
+ "pickg",
+ "planarg",
+ "ranlabg",
+ "shortg",
+ "subdivideg",
+ "twohamg",
+ "vcolg",
+ "watercluster2"]
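+        # The build has no install target; copy the built tools by hand.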
+ mkdirp(prefix.bin)
+ for exe in exes:
+            shutil.copy(exe, join_path(prefix.bin, exe))  # keep exec bit
diff --git a/var/spack/repos/builtin/packages/nccmp/package.py b/var/spack/repos/builtin/packages/nccmp/package.py
index 68bddd6957..d59ca09381 100644
--- a/var/spack/repos/builtin/packages/nccmp/package.py
+++ b/var/spack/repos/builtin/packages/nccmp/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Nccmp(Package):
"""Compare NetCDF Files"""
homepage = "http://nccmp.sourceforge.net/"
diff --git a/var/spack/repos/builtin/packages/ncdu/package.py b/var/spack/repos/builtin/packages/ncdu/package.py
index 0f2f9cda45..0842a592cc 100644
--- a/var/spack/repos/builtin/packages/ncdu/package.py
+++ b/var/spack/repos/builtin/packages/ncdu/package.py
@@ -24,9 +24,9 @@
##############################################################################
from spack import *
+
class Ncdu(Package):
- """
- Ncdu is a disk usage analyzer with an ncurses interface. It is designed
+ """Ncdu is a disk usage analyzer with an ncurses interface. It is designed
to find space hogs on a remote server where you don't have an entire
    graphical setup available, but it is a useful tool even on regular desktop
systems. Ncdu aims to be fast, simple and easy to use, and should be able
@@ -38,15 +38,15 @@ class Ncdu(Package):
version('1.11', '9e44240a5356b029f05f0e70a63c4d12')
version('1.10', '7535decc8d54eca811493e82d4bfab2d')
- version('1.9' , '93258079db897d28bb8890e2db89b1fb')
- version('1.8' , '94d7a821f8a0d7ba8ef3dd926226f7d5')
- version('1.7' , '172047c29d232724cc62e773e82e592a')
+ version('1.9', '93258079db897d28bb8890e2db89b1fb')
+ version('1.8', '94d7a821f8a0d7ba8ef3dd926226f7d5')
+ version('1.7', '172047c29d232724cc62e773e82e592a')
depends_on("ncurses")
def install(self, spec, prefix):
configure('--prefix=%s' % prefix,
- '--with-ncurses=%s' % spec['ncurses'])
+ '--with-ncurses=%s' % spec['ncurses'])
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/ncl/package.py b/var/spack/repos/builtin/packages/ncl/package.py
new file mode 100644
index 0000000000..7d31c7a8f7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ncl/package.py
@@ -0,0 +1,233 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+import shutil
+import tempfile
+
+
+class Ncl(Package):
+ """NCL is an interpreted language designed specifically for
+ scientific data analysis and visualization. Supports NetCDF 3/4,
+    GRIB 1/2, HDF 4/5, HDF-EOS 2/5, shapefile, ASCII, binary.
+ Numerous analysis functions are built-in."""
+
+ homepage = "https://www.ncl.ucar.edu"
+
+ version('6.3.0', '4834df63d3b56778441246303ab921c4',
+ url='https://www.earthsystemgrid.org/download/fileDownload.html?'
+ 'logicalFileId=bec58cb3-cd9b-11e4-bb80-00c0f03d5b7c',
+ extension='tar.gz')
+ patch('spack_ncl.patch')
+
+ # This installation script is implemented according to this manual:
+ # http://www.ncl.ucar.edu/Download/build_from_src.shtml
+
+ variant('hdf4', default=False, description='Enable HDF4 support.')
+ variant('gdal', default=False, description='Enable GDAL support.')
+ variant('triangle', default=True, description='Enable Triangle support.')
+ variant('udunits2', default=True, description='Enable UDUNITS-2 support.')
+ variant('openmp', default=True, description='Enable OpenMP support.')
+
+ # Non-optional dependencies according to the manual:
+ depends_on('jpeg')
+ depends_on('netcdf')
+ depends_on('cairo')
+
+ # Also, the manual says that ncl requires zlib, but that comes as a
+ # mandatory dependency of libpng, which is a mandatory dependency of cairo.
+
+ # In Spack, we do not have an option to compile netcdf without netcdf-4
+ # support, so we will tell the ncl configuration script that we want
+ # support for netcdf-4, but the script assumes that hdf5 is compiled with
+ # szip support. We introduce this restriction with the following dependency
+ # statement.
+ depends_on('hdf5@:1.8+szip')
+
+ # In Spack, we also do not have an option to compile netcdf without DAP
+ # support, so we will tell the ncl configuration script that we have it.
+
+ # Some of the optional dependencies according to the manual:
+ depends_on('hdf', when='+hdf4')
+ depends_on('gdal', when='+gdal')
+ depends_on('udunits2', when='+udunits2')
+
+ # We need src files of triangle to appear in ncl's src tree if we want
+ # triangle's features.
+ resource(
+ name='triangle',
+ url='http://www.netlib.org/voronoi/triangle.zip',
+ md5='10aff8d7950f5e0e2fb6dd2e340be2c9',
+ placement='triangle_src',
+ when='+triangle')
+
+ def install(self, spec, prefix):
+
+ if (self.compiler.fc is None) or (self.compiler.cc is None):
+ raise InstallError('NCL package requires both '
+ 'C and Fortran compilers.')
+
+ self.prepare_site_config()
+ self.prepare_install_config()
+ self.prepare_src_tree()
+ make('Everything', parallel=False)
+
+ def setup_environment(self, spack_env, run_env):
+ run_env.set('NCARG_ROOT', self.spec.prefix)
+
+ def prepare_site_config(self):
+ fc_flags = []
+ cc_flags = []
+ c2f_flags = []
+
+ if '+openmp' in self.spec:
+ fc_flags.append(self.compiler.openmp_flag)
+ cc_flags.append(self.compiler.openmp_flag)
+
+ if self.compiler.name == 'gcc':
+ fc_flags.append('-fno-range-check')
+ c2f_flags.extend(['-lgfortran'])
+ elif self.compiler.name == 'intel':
+ fc_flags.append('-fp-model precise')
+ cc_flags.append('-fp-model precise')
+ c2f_flags.extend(['-lifcore', '-lifport'])
+
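+        # Write the site definitions that the patched config/ymake
+        # (see spack_ncl.patch) picks up.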
+ with open('./config/Spack', 'w') as f:
+ f.writelines([
+ '#define HdfDefines\n',
+ '#define CppCommand \'/usr/bin/env cpp -traditional\'\n',
+ '#define CCompiler cc\n',
+ '#define FCompiler fc\n',
+ ('#define CtoFLibraries ' + ' '.join(c2f_flags) + '\n'
+ if len(c2f_flags) > 0
+ else ''),
+ ('#define CtoFLibrariesUser ' + ' '.join(c2f_flags) + '\n'
+ if len(c2f_flags) > 0
+ else ''),
+ ('#define CcOptions ' + ' '.join(cc_flags) + '\n'
+ if len(cc_flags) > 0
+ else ''),
+ ('#define FcOptions ' + ' '.join(fc_flags) + '\n'
+ if len(fc_flags) > 0
+ else ''),
+ '#define BuildShared NO'
+ ])
+
+ def prepare_install_config(self):
+ # Remove the results of the previous configuration attempts.
+ self.delete_files('./Makefile', './config/Site.local')
+
+ # Generate an array of answers that will be passed to the interactive
+ # configuration script.
+ config_answers = [
+ # Enter Return to continue
+ '\n',
+ # Build NCL?
+ 'y\n',
+ # Parent installation directory :
+ '\'' + self.spec.prefix + '\'\n',
+ # System temp space directory :
+ '\'' + tempfile.mkdtemp(prefix='ncl_ncar_') + '\'\n',
+ # Build NetCDF4 feature support (optional)?
+ 'y\n'
+ ]
+
+ if '+hdf4' in self.spec:
+ config_answers.extend([
+ # Build HDF4 support (optional) into NCL?
+ 'y\n',
+ # Also build HDF4 support (optional) into raster library?
+ 'y\n',
+ # Did you build HDF4 with szip support?
+ 'y\n' if self.spec.satisfies('^hdf+szip') else 'n\n'
+ ])
+ else:
+ config_answers.extend([
+ # Build HDF4 support (optional) into NCL?
+ 'n\n',
+ # Also build HDF4 support (optional) into raster library?
+ 'n\n'
+ ])
+
+ config_answers.extend([
+ # Build Triangle support (optional) into NCL
+ 'y\n' if '+triangle' in self.spec else 'n\n',
+ # If you are using NetCDF V4.x, did you enable NetCDF-4 support?
+ 'y\n',
+ # Did you build NetCDF with OPeNDAP support?
+ 'y\n',
+ # Build GDAL support (optional) into NCL?
+ 'y\n' if '+gdal' in self.spec else 'n\n',
+ # Build Udunits-2 support (optional) into NCL?
+            'y\n' if '+udunits2' in self.spec else 'n\n',
+ # Build Vis5d+ support (optional) into NCL?
+ 'n\n',
+ # Build HDF-EOS2 support (optional) into NCL?
+ 'n\n',
+ # Build HDF5 support (optional) into NCL?
+ 'y\n',
+ # Build HDF-EOS5 support (optional) into NCL?
+ 'n\n',
+ # Build GRIB2 support (optional) into NCL?
+ 'n\n',
+ # Enter local library search path(s) :
+ # The paths will be passed by the Spack wrapper.
+ ' \n',
+ # Enter local include search path(s) :
+ # All other paths will be passed by the Spack wrapper.
+ '\'' + join_path(self.spec['freetype'].prefix.include,
+ 'freetype2') + '\'\n',
+ # Go back and make more changes or review?
+ 'n\n',
+ # Save current configuration?
+ 'y\n'
+ ])
+
+ config_answers_filename = 'spack-config.in'
+ config_script = Executable('./Configure')
+
+ with open(config_answers_filename, 'w') as f:
+ f.writelines(config_answers)
+
+ with open(config_answers_filename, 'r') as f:
+ config_script(input=f)
+
+ def prepare_src_tree(self):
+ if '+triangle' in self.spec:
+ triangle_src = join_path(self.stage.source_path, 'triangle_src')
+ triangle_dst = join_path(self.stage.source_path, 'ni', 'src',
+ 'lib', 'hlu')
+ shutil.copy(join_path(triangle_src, 'triangle.h'), triangle_dst)
+ shutil.copy(join_path(triangle_src, 'triangle.c'), triangle_dst)
+
+ @staticmethod
+ def delete_files(*filenames):
+ for filename in filenames:
+ if os.path.exists(filename):
+ try:
+ os.remove(filename)
+                except OSError as e:
+ raise InstallError('Failed to delete file %s: %s' % (
+ e.filename, e.strerror))
diff --git a/var/spack/repos/builtin/packages/ncl/spack_ncl.patch b/var/spack/repos/builtin/packages/ncl/spack_ncl.patch
new file mode 100644
index 0000000000..ebbecc43ba
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ncl/spack_ncl.patch
@@ -0,0 +1,30 @@
+--- a/config/ymake 2015-03-16 22:21:42.000000000 +0100
++++ b/config/ymake 2016-10-14 13:44:49.530646098 +0200
+@@ -537,0 +538,3 @@
++# We want to have our own definitions for spack
++set sysincs = Spack
++
+--- a/Configure 2015-03-16 22:22:17.000000000 +0100
++++ b/Configure 2016-10-14 13:49:42.157631106 +0200
+@@ -1137,5 +1137,13 @@
+- if (! -d $incs[1]) then
+- echo " *** Warning: <$incs[1]> does not exist"
+- echo ""
+- goto proc_locincdir
+- else
++
++ # We don't want our path(s) to be preprocessed by cpp
++ # inside ymake script. That is why we pass them in quotes (')
++ # to this script. But if we do so, the following condition
++ # is always false. That is why we comment it out and promise
++ # to pass only correct path(s). You might want to do the same
++ # thing for the libraries search path(s).
++
++ # if (! -d $incs[1]) then
++ # echo " *** Warning: <$incs[1]> does not exist"
++ # echo ""
++ # goto proc_locincdir
++ # else
+@@ -1143 +1151 @@
+- endif
++ # endif
diff --git a/var/spack/repos/builtin/packages/nco/package.py b/var/spack/repos/builtin/packages/nco/package.py
index 4bc4da68e3..acd96f5e9c 100644
--- a/var/spack/repos/builtin/packages/nco/package.py
+++ b/var/spack/repos/builtin/packages/nco/package.py
@@ -23,32 +23,33 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
-class Nco(Package):
+
+class Nco(AutotoolsPackage):
"""The NCO toolkit manipulates and analyzes data stored in
netCDF-accessible formats"""
- homepage = "https://sourceforge.net/projects/nco"
- url = "https://github.com/nco/nco/archive/4.5.5.tar.gz"
+ homepage = "http://nco.sourceforge.net/"
+ url = "https://github.com/nco/nco/archive/4.6.2.tar.gz"
+ version('4.6.3', '0e1d6616c65ed3a30c54cc776da4f987')
+ version('4.6.2', 'b7471acf0cc100343392f4171fb56113')
+ version('4.6.1', 'ef43cc989229c2790a9094bd84728fd8')
version('4.5.5', '9f1f1cb149ad6407c5a03c20122223ce')
+    variant('doc', default=False,
+            description='Build/install NCO Texinfo-based documentation')
+
# See "Compilation Requirements" at:
# http://nco.sourceforge.net/#bld
-
depends_on('netcdf')
- depends_on('antlr@2.7.7+cxx') # (required for ncap2)
- depends_on('gsl') # (desirable for ncap2)
- depends_on('udunits2') # (allows dimensional unit transformations)
- # depends_on('opendap') # (enables network transparency),
-
- def install(self, spec, prefix):
- opts = [
- '--prefix=%s' % prefix,
- '--disable-openmp', # TODO: Make this a variant
- '--disable-dap', # TODO: Make this a variant
- '--disable-esmf']
- configure(*opts)
- make()
- make("install")
+ depends_on('antlr@2.7.7+cxx') # required for ncap2
+ depends_on('gsl') # desirable for ncap2
+ depends_on('udunits2') # allows dimensional unit transformations
+
+ depends_on('flex', type='build')
+ depends_on('bison', type='build')
+ depends_on('texinfo@4.12:', type='build', when='+doc')
+
+ def configure_args(self):
+ spec = self.spec
+ return ['--{0}-doc'.format('enable' if '+doc' in spec else 'disable')]
diff --git a/var/spack/repos/builtin/packages/ncurses/package.py b/var/spack/repos/builtin/packages/ncurses/package.py
index 3ab2b0477d..e4fd5bf269 100644
--- a/var/spack/repos/builtin/packages/ncurses/package.py
+++ b/var/spack/repos/builtin/packages/ncurses/package.py
@@ -24,11 +24,14 @@
##############################################################################
from spack import *
+
class Ncurses(Package):
- """The ncurses (new curses) library is a free software emulation of curses
- in System V Release 4.0, and more. It uses terminfo format, supports pads and
- color and multiple highlights and forms characters and function-key mapping,
- and has all the other SYSV-curses enhancements over BSD curses.
+ """The ncurses (new curses) library is a free software emulation of
+ curses in System V Release 4.0, and more. It uses terminfo format,
+ supports pads and color and multiple highlights and forms
+ characters and function-key mapping, and has all the other
+ SYSV-curses enhancements over BSD curses.
+
"""
homepage = "http://invisible-island.net/ncurses/ncurses.html"
@@ -47,7 +50,10 @@ class Ncurses(Package):
"--enable-widec",
"--enable-overwrite",
"--disable-lib-suffixes",
- "--without-ada"]
+ "--without-ada",
+ "--enable-pc-files",
+ "--with-pkg-config-libdir={0}/lib/pkgconfig".format(prefix)
+ ]
configure(*opts)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/ncview/package.py b/var/spack/repos/builtin/packages/ncview/package.py
index b39e17ca49..5c3cf300d2 100644
--- a/var/spack/repos/builtin/packages/ncview/package.py
+++ b/var/spack/repos/builtin/packages/ncview/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Ncview(Package):
"""Simple viewer for NetCDF files."""
homepage = "http://meteora.ucsd.edu/~pierce/ncview_home_page.html"
@@ -31,14 +32,13 @@ class Ncview(Package):
version('2.1.7', 'debd6ca61410aac3514e53122ab2ba07')
- depends_on("netcdf")
- depends_on("udunits2")
-
- # OS Dependencies
- # Ubuntu: apt-get install libxaw7-dev
- # CentOS 7: yum install libXaw-devel
+ depends_on('netcdf')
+ depends_on('udunits2')
+ depends_on('libpng')
+ depends_on('libxaw')
def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
+ configure('--prefix={0}'.format(prefix))
+
make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/ndiff/package.py b/var/spack/repos/builtin/packages/ndiff/package.py
index 3c9dd4054a..dc41add03f 100644
--- a/var/spack/repos/builtin/packages/ndiff/package.py
+++ b/var/spack/repos/builtin/packages/ndiff/package.py
@@ -24,11 +24,15 @@
##############################################################################
from spack import *
+
class Ndiff(Package):
- """The ndiff tool is a binary utility that compares putatively similar files
- while ignoring small numeric differernces. This utility is most often used
- to compare files containing a lot of floating-point numeric data that
- may be slightly different due to numeric error."""
+ """The ndiff tool is a binary utility that compares putatively similar
+    files while ignoring small numeric differences. This utility is
+ most often used to compare files containing a lot of
+ floating-point numeric data that may be slightly different due to
+ numeric error.
+
+ """
homepage = "http://ftp.math.utah.edu/pub/ndiff/"
url = "http://ftp.math.utah.edu/pub/ndiff/ndiff-2.00.tar.gz"
diff --git a/var/spack/repos/builtin/packages/netcdf-cxx/package.py b/var/spack/repos/builtin/packages/netcdf-cxx/package.py
index 994c51c0da..2ad710fc45 100644
--- a/var/spack/repos/builtin/packages/netcdf-cxx/package.py
+++ b/var/spack/repos/builtin/packages/netcdf-cxx/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class NetcdfCxx(Package):
+
+class NetcdfCxx(AutotoolsPackage):
"""Deprecated C++ compatibility bindings for NetCDF.
These do NOT read or write NetCDF-4 files, and are no longer
maintained by Unidata. Developers should migrate to current
@@ -36,8 +37,3 @@ class NetcdfCxx(Package):
version('4.2', 'd32b20c00f144ae6565d9e98d9f6204c')
depends_on('netcdf')
-
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py
index f8af76429b..2da30c7b0c 100644
--- a/var/spack/repos/builtin/packages/netcdf-cxx4/package.py
+++ b/var/spack/repos/builtin/packages/netcdf-cxx4/package.py
@@ -25,7 +25,7 @@
from spack import *
-class NetcdfCxx4(Package):
+class NetcdfCxx4(AutotoolsPackage):
"""C++ interface for NetCDF4"""
homepage = "http://www.unidata.ucar.edu/software/netcdf"
url = "https://www.github.com/unidata/netcdf-cxx4/tarball/v4.3.0"
@@ -34,11 +34,8 @@ class NetcdfCxx4(Package):
version('4.2.1', 'd019853802092cf686254aaba165fc81')
depends_on('netcdf')
- depends_on("autoconf")
+ depends_on('autoconf', type='build')
- def install(self, spec, prefix):
+ def autoreconf(self, spec, prefix):
# Rebuild to prevent problems of inconsistency in git repo
which('autoreconf')('-ivf')
- configure('--prefix=%s' % prefix)
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/netcdf-fortran/package.py b/var/spack/repos/builtin/packages/netcdf-fortran/package.py
index 3d1951ceee..a2556d8783 100644
--- a/var/spack/repos/builtin/packages/netcdf-fortran/package.py
+++ b/var/spack/repos/builtin/packages/netcdf-fortran/package.py
@@ -24,17 +24,14 @@
##############################################################################
from spack import *
-class NetcdfFortran(Package):
+
+class NetcdfFortran(AutotoolsPackage):
"""Fortran interface for NetCDF4"""
homepage = "http://www.unidata.ucar.edu/software/netcdf"
url = "http://www.unidata.ucar.edu/downloads/netcdf/ftp/netcdf-fortran-4.4.3.tar.gz"
+ version('4.4.4', 'e855c789cd72e1b8bc1354366bf6ac72')
version('4.4.3', 'bfd4ae23a34635b273d3eb0d91cbde9e')
depends_on('netcdf')
-
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/netcdf/package.py b/var/spack/repos/builtin/packages/netcdf/package.py
index c2256d5e9f..79a1be2090 100644
--- a/var/spack/repos/builtin/packages/netcdf/package.py
+++ b/var/spack/repos/builtin/packages/netcdf/package.py
@@ -25,64 +25,116 @@
from spack import *
-class Netcdf(Package):
- """NetCDF is a set of software libraries and self-describing, machine-independent
- data formats that support the creation, access, and sharing of array-oriented
- scientific data."""
+class Netcdf(AutotoolsPackage):
+ """NetCDF is a set of software libraries and self-describing,
+ machine-independent data formats that support the creation, access,
+ and sharing of array-oriented scientific data."""
homepage = "http://www.unidata.ucar.edu/software/netcdf"
url = "ftp://ftp.unidata.ucar.edu/pub/netcdf/netcdf-4.3.3.tar.gz"
- version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e')
- version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae')
-
- variant('mpi', default=True, description='Enables MPI parallelism')
- variant('hdf4', default=False, description='Enable HDF4 support')
-
- depends_on("m4")
+ version('4.4.1', '7843e35b661c99e1d49e60791d5072d8')
+ version('4.4.0', 'cffda0cbd97fdb3a06e9274f7aef438e')
+ version('4.3.3.1', '5c9dad3705a3408d27f696e5b31fb88c')
+ version('4.3.3', '5fbd0e108a54bd82cb5702a73f56d2ae')
+
+ variant('mpi', default=True, description='Enables MPI parallelism')
+ variant('hdf4', default=False, description='Enable HDF4 support')
+ variant('shared', default=True, description='Enable shared library')
+ variant('parallel-netcdf', default=False, description='Enable PnetCDF support')
+ variant('dap', default=False, description='Enable DAP support')
+ variant('cdmremote', default=False, description='Enable CDM Remote support')
+ # These variants control the number of dimensions (i.e. coordinates and
+ # attributes) and variables (e.g. time, entity ID, number of coordinates)
+ # that can be used in any particular NetCDF file.
+ variant('maxdims', default=1024,
+ description='Defines the maximum dimensions of NetCDF files.')
+ variant('maxvars', default=8192,
+ description='Defines the maximum variables of NetCDF files.')
+
+ depends_on("m4", type='build')
depends_on("hdf", when='+hdf4')
-
- # Required for DAP support
- depends_on("curl")
+ depends_on("curl@7.18.0:", when='+dap')
+ depends_on("curl@7.18.0:", when='+cdmremote')
+ depends_on('parallel-netcdf', when='@4.2.1.1:+parallel-netcdf')
# Required for NetCDF-4 support
- depends_on("zlib")
- depends_on("hdf5+mpi", when='+mpi')
- depends_on("hdf5~mpi", when='~mpi')
+ depends_on("zlib@1.2.5:")
+ depends_on('hdf5')
+
+ # NetCDF 4.4.0 and prior have compatibility issues with HDF5 1.10 and later
+ # https://github.com/Unidata/netcdf-c/issues/250
+ depends_on('hdf5@:1.8', when='@:4.4.0')
+
+ def patch(self):
+ try:
+ max_dims = int(self.spec.variants['maxdims'].value)
+ max_vars = int(self.spec.variants['maxvars'].value)
+ except (ValueError, TypeError):
+ raise TypeError('NetCDF variant values max[dims|vars] must be '
+ 'integer values.')
+
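+        # Rewrite the dimension/variable limits in netcdf.h to the
+        # values requested via the maxdims/maxvars variants.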
+ ff = FileFilter(join_path('include', 'netcdf.h'))
+ ff.filter(r'^(#define\s+NC_MAX_DIMS\s+)\d+(.*)$',
+ r'\1{0}\2'.format(max_dims))
+ ff.filter(r'^(#define\s+NC_MAX_VARS\s+)\d+(.*)$',
+ r'\1{0}\2'.format(max_vars))
+
+ def configure_args(self):
+ spec = self.spec
+ # Workaround until variant forwarding works properly
+ if '+mpi' in spec and spec.satisfies('^hdf5~mpi'):
+ raise RuntimeError('Invalid spec. Package netcdf requires '
+ 'hdf5+mpi, but spec asked for hdf5~mpi.')
- def install(self, spec, prefix):
# Environment variables
+ CFLAGS = []
CPPFLAGS = []
LDFLAGS = []
LIBS = []
config_args = [
- "--prefix=%s" % prefix,
"--enable-fsync",
"--enable-v2",
"--enable-utilities",
- "--enable-shared",
"--enable-static",
"--enable-largefile",
# necessary for HDF5 support
"--enable-netcdf-4",
"--enable-dynamic-loading",
- # necessary for DAP support
- "--enable-dap"
]
- # Make sure Netcdf links against Spack's curl
- # Otherwise it may pick up system's curl, which could lead to link errors:
- # /usr/lib/x86_64-linux-gnu/libcurl.so: undefined reference to `SSL_CTX_use_certificate_chain_file@OPENSSL_1.0.0'
- LIBS.append("-lcurl")
- CPPFLAGS.append("-I%s" % spec['curl'].prefix.include)
- LDFLAGS.append( "-L%s" % spec['curl'].prefix.lib)
+ if '+shared' in spec:
+ config_args.append('--enable-shared')
+ else:
+ config_args.append('--disable-shared')
+ # We don't have shared libraries but we still want it to be
+ # possible to use this library in shared builds
+ CFLAGS.append('-fPIC')
+
+ if '+dap' in spec:
+ config_args.append('--enable-dap')
+ else:
+ config_args.append('--disable-dap')
+
+ if '+cdmremote' in spec:
+ config_args.append('--enable-cdmremote')
+ else:
+ config_args.append('--disable-cdmremote')
+
+ if '+dap' in spec or '+cdmremote' in spec:
+ # Make sure Netcdf links against Spack's curl, otherwise it may
+ # pick up system's curl, which can give link errors, e.g.:
+ # undefined reference to `SSL_CTX_use_certificate_chain_file`
+ LIBS.append("-lcurl")
+ CPPFLAGS.append("-I%s" % spec['curl'].prefix.include)
+ LDFLAGS.append("-L%s" % spec['curl'].prefix.lib)
if '+mpi' in spec:
config_args.append('--enable-parallel4')
CPPFLAGS.append("-I%s/include" % spec['hdf5'].prefix)
- LDFLAGS.append( "-L%s/lib" % spec['hdf5'].prefix)
+ LDFLAGS.append("-L%s/lib" % spec['hdf5'].prefix)
# HDF4 support
# As of NetCDF 4.1.3, "--with-hdf4=..." is no longer a valid option
@@ -90,22 +142,32 @@ class Netcdf(Package):
if '+hdf4' in spec:
config_args.append("--enable-hdf4")
CPPFLAGS.append("-I%s/include" % spec['hdf'].prefix)
- LDFLAGS.append( "-L%s/lib" % spec['hdf'].prefix)
- LIBS.append( "-l%s" % "jpeg")
+ LDFLAGS.append("-L%s/lib" % spec['hdf'].prefix)
+ LIBS.append("-l%s" % "jpeg")
- if 'szip' in spec:
+ if '+szip' in spec:
CPPFLAGS.append("-I%s/include" % spec['szip'].prefix)
- LDFLAGS.append( "-L%s/lib" % spec['szip'].prefix)
- LIBS.append( "-l%s" % "sz")
+ LDFLAGS.append("-L%s/lib" % spec['szip'].prefix)
+ LIBS.append("-l%s" % "sz")
+
+ # PnetCDF support
+ if '+parallel-netcdf' in spec:
+ config_args.append('--enable-pnetcdf')
+ config_args.append('CC=%s' % spec['mpi'].mpicc)
+ CPPFLAGS.append("-I%s/include" % spec['parallel-netcdf'].prefix)
+ LDFLAGS.append("-L%s/lib" % spec['parallel-netcdf'].prefix)
# Fortran support
# In version 4.2+, NetCDF-C and NetCDF-Fortran have split.
# Use the netcdf-fortran package to install Fortran support.
+ config_args.append('CFLAGS=%s' % ' '.join(CFLAGS))
config_args.append('CPPFLAGS=%s' % ' '.join(CPPFLAGS))
config_args.append('LDFLAGS=%s' % ' '.join(LDFLAGS))
config_args.append('LIBS=%s' % ' '.join(LIBS))
- configure(*config_args)
- make()
- make("install")
+ return config_args
+
+ def check(self):
+ # h5_test fails when run in parallel
+ make('check', parallel=False)
diff --git a/var/spack/repos/builtin/packages/netgauge/package.py b/var/spack/repos/builtin/packages/netgauge/package.py
index be9292fabb..b57cdbe5f3 100644
--- a/var/spack/repos/builtin/packages/netgauge/package.py
+++ b/var/spack/repos/builtin/packages/netgauge/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Netgauge(Package):
"""Netgauge is a high-precision network parameter measurement
tool. It supports benchmarking of many different network protocols
diff --git a/var/spack/repos/builtin/packages/netlib-lapack/package.py b/var/spack/repos/builtin/packages/netlib-lapack/package.py
index 47857eb713..4b03259cbd 100644
--- a/var/spack/repos/builtin/packages/netlib-lapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-lapack/package.py
@@ -1,4 +1,4 @@
-##############################################################################
+#############################################################################
# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
@@ -26,15 +26,17 @@ from spack import *
class NetlibLapack(Package):
- """
- LAPACK version 3.X is a comprehensive FORTRAN library that does linear algebra operations including matrix
- inversions, least squared solutions to linear sets of equations, eigenvector analysis, singular value
- decomposition, etc. It is a very comprehensive and reputable package that has found extensive use in the
- scientific community.
+ """LAPACK version 3.X is a comprehensive FORTRAN library that does
+    linear algebra operations including matrix inversions, least squares
+ solutions to linear sets of equations, eigenvector analysis, singular
+ value decomposition, etc. It is a very comprehensive and reputable
+ package that has found extensive use in the scientific community.
+
"""
homepage = "http://www.netlib.org/lapack/"
url = "http://www.netlib.org/lapack/lapack-3.5.0.tgz"
+ version('3.6.1', '421b2cb72e15f237e144428f9c460ee0')
version('3.6.0', 'f2f6c67134e851fe189bb3ca1fbb5101')
version('3.5.0', 'b1d3e3e425b2e44a06760ff173104bdf')
version('3.4.2', '61bf1a8a4469d4bdb7604f5897179478')
@@ -44,37 +46,64 @@ class NetlibLapack(Package):
variant('debug', default=False, description='Activates the Debug build type')
variant('shared', default=True, description="Build shared library version")
- variant('external-blas', default=False, description='Build lapack with an external blas')
+ variant('external-blas', default=False,
+ description='Build lapack with an external blas')
- variant('lapacke', default=True, description='Activates the build of the LAPACKE C interface')
+ variant('lapacke', default=True,
+ description='Activates the build of the LAPACKE C interface')
# virtual dependency
provides('blas', when='~external-blas')
provides('lapack')
- depends_on('cmake')
+ depends_on('cmake', type='build')
depends_on('blas', when='+external-blas')
-
def patch(self):
# Fix cblas CMakeLists.txt -- has wrong case for subdirectory name.
if self.spec.satisfies('@3.6.0:'):
- filter_file('${CMAKE_CURRENT_SOURCE_DIR}/CMAKE/',
- '${CMAKE_CURRENT_SOURCE_DIR}/cmake/', 'CBLAS/CMakeLists.txt', string=True)
+ filter_file(
+ '${CMAKE_CURRENT_SOURCE_DIR}/CMAKE/',
+ '${CMAKE_CURRENT_SOURCE_DIR}/cmake/',
+ 'CBLAS/CMakeLists.txt', string=True)
+
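+    # Dependents locate these libraries through the blas_libs and
+    # lapack_libs properties of the virtual blas/lapack providers.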
+ @property
+ def blas_libs(self):
+ shared = True if '+shared' in self.spec else False
+ return find_libraries(
+ ['libblas'], root=self.prefix, shared=shared, recurse=True
+ )
+
+ @property
+ def lapack_libs(self):
+ shared = True if '+shared' in self.spec else False
+ return find_libraries(
+ ['liblapack'], root=self.prefix, shared=shared, recurse=True
+ )
def install_one(self, spec, prefix, shared):
- cmake_args = ['-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if shared else 'OFF'),
- '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'),
- '-DLAPACKE:BOOL=%s' % ('ON' if '+lapacke' in spec else 'OFF')]
+ cmake_args = [
+ '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if shared else 'OFF'),
+ '-DCMAKE_BUILD_TYPE:STRING=%s' % (
+ 'Debug' if '+debug' in spec else 'Release'),
+ '-DLAPACKE:BOOL=%s' % ('ON' if '+lapacke' in spec else 'OFF')]
if spec.satisfies('@3.6.0:'):
- cmake_args.extend(['-DCBLAS=ON']) # always build CBLAS
+ cmake_args.extend(['-DCBLAS=ON']) # always build CBLAS
+
+ if self.compiler.name == 'intel':
+            # The Intel compiler runs into syntax errors when building
+            # CBLAS and LAPACKE
+ cmake_args.extend(['-DCBLAS=OFF'])
+ cmake_args.extend(['-DLAPACKE:BOOL=OFF'])
+
+        # deprecated routines are commonly needed by, for example, suitesparse
+        # Note that Spack's OpenBLAS is built with deprecated routines
+ cmake_args.extend(['-DBUILD_DEPRECATED:BOOL=ON'])
if '+external-blas' in spec:
- # TODO : the mechanism to specify the library should be more general,
- # TODO : but this allows to have an hook to an external blas
cmake_args.extend([
'-DUSE_OPTIMIZED_BLAS:BOOL=ON',
- '-DBLAS_LIBRARIES:PATH=%s' % join_path(spec['blas'].prefix.lib, 'libblas.a')
+ '-DBLAS_LIBRARIES:PATH=%s' % spec['blas'].blas_libs.joined(';')
])
cmake_args.extend(std_cmake_args)
@@ -85,7 +114,6 @@ class NetlibLapack(Package):
make()
make("install")
-
def install(self, spec, prefix):
# Always build static libraries.
self.install_one(spec, prefix, False)
@@ -93,16 +121,3 @@ class NetlibLapack(Package):
# Build shared libraries if requested.
if '+shared' in spec:
self.install_one(spec, prefix, True)
-
-
- def setup_dependent_package(self, module, dspec):
- # This is WIP for a prototype interface for virtual packages.
- # We can update this as more builds start depending on BLAS/LAPACK.
- libdir = find_library_path('libblas.a', self.prefix.lib64, self.prefix.lib)
-
- self.spec.blas_static_lib = join_path(libdir, 'libblas.a')
- self.spec.lapack_static_lib = join_path(libdir, 'liblapack.a')
-
- if '+shared' in self.spec:
- self.spec.blas_shared_lib = join_path(libdir, 'libblas.%s' % dso_suffix)
- self.spec.lapack_shared_lib = join_path(libdir, 'liblapack.%s' % dso_suffix)
diff --git a/var/spack/repos/builtin/packages/netlib-scalapack/package.py b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
index f7fe26a42d..578d2d8988 100644
--- a/var/spack/repos/builtin/packages/netlib-scalapack/package.py
+++ b/var/spack/repos/builtin/packages/netlib-scalapack/package.py
@@ -25,11 +25,14 @@
from spack import *
import sys
+
class NetlibScalapack(Package):
- """ScaLAPACK is a library of high-performance linear algebra routines for parallel distributed memory machines"""
+ """ScaLAPACK is a library of high-performance linear algebra routines for
+ parallel distributed memory machines
+ """
homepage = "http://www.netlib.org/scalapack/"
- url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz"
+ url = "http://www.netlib.org/scalapack/scalapack-2.0.2.tgz"
version('2.0.2', '2f75e600a2ba155ed9ce974a1c4b536f')
version('2.0.1', '17b8cde589ea0423afe1ec43e7499161')
@@ -37,21 +40,48 @@ class NetlibScalapack(Package):
# versions before 2.0.0 are not using cmake and requires blacs as
# a separated package
- variant('shared', default=True, description='Build the shared library version')
- variant('fpic', default=False, description="Build with -fpic compiler option")
+ variant(
+ 'shared',
+ default=True,
+ description='Build the shared library version'
+ )
+ variant(
+ 'fpic',
+ default=False,
+ description='Build with -fpic compiler option'
+ )
provides('scalapack')
- depends_on('cmake')
depends_on('mpi')
depends_on('lapack')
+ depends_on('blas')
+ depends_on('cmake', when='@2.0.0:', type='build')
+
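+    # Dependents link via spec['scalapack'].scalapack_libs.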
+ @property
+ def scalapack_libs(self):
+ shared = True if '+shared' in self.spec else False
+ return find_libraries(
+ ['libscalapack'], root=self.prefix, shared=shared, recurse=True
+ )
def install(self, spec, prefix):
options = [
- "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else 'OFF'),
- "-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else 'ON'),
- "-DUSE_OPTIMIZED_LAPACK_BLAS:BOOL=ON", # forces scalapack to use find_package(LAPACK)
- ]
+ "-DBUILD_SHARED_LIBS:BOOL=%s" % ('ON' if '+shared' in spec else
+ 'OFF'),
+ "-DBUILD_STATIC_LIBS:BOOL=%s" % ('OFF' if '+shared' in spec else
+ 'ON')
+ ]
+
+ # Make sure we use Spack's Lapack:
+ blas = spec['blas'].blas_libs
+ lapack = spec['lapack'].lapack_libs
+ options.extend([
+ '-DLAPACK_FOUND=true',
+ '-DLAPACK_INCLUDE_DIRS=%s' % spec['lapack'].prefix.include,
+ '-DLAPACK_LIBRARIES=%s' % (lapack.joined(';')),
+ '-DBLAS_LIBRARIES=%s' % (blas.joined(';'))
+ ])
if '+fpic' in spec:
options.extend([
@@ -66,16 +96,6 @@ class NetlibScalapack(Package):
make()
make("install")
- # The shared libraries are not installed correctly on Darwin; correct this
+ # The shared libraries are not installed correctly on Darwin:
if (sys.platform == 'darwin') and ('+shared' in spec):
fix_darwin_install_name(prefix.lib)
-
-
- def setup_dependent_package(self, module, dependent_spec):
- spec = self.spec
- lib_dsuffix = '.dylib' if sys.platform == 'darwin' else '.so'
- lib_suffix = lib_dsuffix if '+shared' in spec else '.a'
-
- spec.fc_link = '-L%s -lscalapack' % spec.prefix.lib
- spec.cc_link = spec.fc_link
- spec.libraries = [join_path(spec.prefix.lib, 'libscalapack%s' % lib_suffix)]
diff --git a/var/spack/repos/builtin/packages/nettle/package.py b/var/spack/repos/builtin/packages/nettle/package.py
index 56e4836611..7e2b758bc0 100644
--- a/var/spack/repos/builtin/packages/nettle/package.py
+++ b/var/spack/repos/builtin/packages/nettle/package.py
@@ -24,18 +24,24 @@
##############################################################################
from spack import *
+
class Nettle(Package):
"""The Nettle package contains the low-level cryptographic library
that is designed to fit easily in many contexts."""
- homepage = "http://www.example.com"
+ homepage = "https://www.lysator.liu.se/~nisse/nettle/"
url = "http://ftp.gnu.org/gnu/nettle/nettle-2.7.1.tar.gz"
+ version('3.2', 'afb15b4764ebf1b4e6d06c62bd4d29e4')
version('2.7', '2caa1bd667c35db71becb93c5d89737f')
depends_on('gmp')
+ depends_on('m4', type='build')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
+
make()
- make("install")
+ if self.run_tests:
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/nextflow/package.py b/var/spack/repos/builtin/packages/nextflow/package.py
new file mode 100644
index 0000000000..54f3dbf915
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nextflow/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Nextflow(Package):
+ """Data-driven computational pipelines"""
+
+ homepage = "http://www.nextflow.io"
+
+ version('0.20.1', '0e4e0e3eca1c2c97f9b4bffd944b923a',
+ url='https://github.com/nextflow-io/nextflow/releases/download/v0.20.1/nextflow',
+ expand=False)
+
+ depends_on('jdk')
+
+ def install(self, spec, prefix):
+ mkdirp(prefix.bin)
+ install("nextflow", join_path(prefix.bin, "nextflow"))
+ set_executable(join_path(prefix.bin, "nextflow"))
diff --git a/var/spack/repos/builtin/packages/nfft/package.py b/var/spack/repos/builtin/packages/nfft/package.py
new file mode 100644
index 0000000000..4c1ce2b03d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nfft/package.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Nfft(AutotoolsPackage):
+ """NFFT is a C subroutine library for computing the nonequispaced discrete
+ Fourier transform (NDFT) in one or more dimensions, of arbitrary input
+ size, and of complex data."""
+
+ homepage = "https://www-user.tu-chemnitz.de/~potts/nfft"
+ url = "https://www-user.tu-chemnitz.de/~potts/nfft/download/nfft-3.3.2.tar.gz"
+
+ version('3.3.2', '550737c06f4d6ea6c156800169d8f0d9')
+
+ depends_on('fftw')
+
+ def install(self, spec, prefix):
+ options = ['--prefix={0}'.format(prefix)]
+
+ configure(*options)
+ make()
+ if self.run_tests:
+ make("check")
+ make("install")
+
+ if '+float' in spec['fftw']:
+ configure('--enable-float', *options)
+ make()
+ if self.run_tests:
+ make("check")
+ make("install")
+ if '+long_double' in spec['fftw']:
+ configure('--enable-long-double', *options)
+ make()
+ if self.run_tests:
+ make("check")
+ make("install")
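
Note: the install() above rebuilds NFFT once per precision enabled in the
fftw dependency. The same logic can be written as a loop; this is only a
sketch and assumes the same fftw variant names used above.

    def install(self, spec, prefix):
        options = ['--prefix={0}'.format(prefix)]

        # Default (double) precision first, then any extra fftw precisions.
        precision_flags = [[]]
        if '+float' in spec['fftw']:
            precision_flags.append(['--enable-float'])
        if '+long_double' in spec['fftw']:
            precision_flags.append(['--enable-long-double'])

        for flags in precision_flags:
            configure(*(flags + options))
            make()
            if self.run_tests:
                make('check')
            make('install')
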
diff --git a/var/spack/repos/builtin/packages/ninja/package.py b/var/spack/repos/builtin/packages/ninja/package.py
index e3f3819289..dcd00576dd 100644
--- a/var/spack/repos/builtin/packages/ninja/package.py
+++ b/var/spack/repos/builtin/packages/ninja/package.py
@@ -25,6 +25,7 @@
from spack import *
import os
+
class Ninja(Package):
""" A small, fast Make alternative """
homepage = "https://martine.github.io/ninja/"
@@ -35,7 +36,6 @@ class Ninja(Package):
extends('python')
def install(self, spec, prefix):
- sh = which('sh')
python('configure.py', '--bootstrap')
cp = which('cp')
diff --git a/var/spack/repos/builtin/packages/nmap/package.py b/var/spack/repos/builtin/packages/nmap/package.py
new file mode 100644
index 0000000000..f4576cde53
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nmap/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Nmap(AutotoolsPackage):
+ """Nmap ("Network Mapper") is a free and open source (license)
+ utility for network discovery and security auditing.
+       It also provides ncat, an updated nc."""
+
+ homepage = "https://nmap.org"
+ url = "https://nmap.org/dist/nmap-7.31.tar.bz2"
+
+ version('7.31', 'f2f6660142a777862342a58cc54258ea')
+ version('7.30', '8d86797d5c9e56de571f9630c0e6b5f8')
diff --git a/var/spack/repos/builtin/packages/node-js/package.py b/var/spack/repos/builtin/packages/node-js/package.py
new file mode 100644
index 0000000000..8f129bd3e0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/node-js/package.py
@@ -0,0 +1,110 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import sys
+import subprocess
+
+
+class NodeJs(Package):
+ """Node.js is a JavaScript runtime built on Chrome's V8 JavaScript
+ engine."""
+
+ homepage = "https://nodejs.org/"
+ url = "https://nodejs.org/download/release/v6.3.0/node-v6.3.0.tar.gz"
+
+ version('7.1.0', '1db5df2cb025f9c70e83d9cf21c4266a')
+ version('6.3.0', '8c14e5c89d66d4d060c91b3ba15dfd31')
+ version('6.2.2', '1120e8bf191fdaee42206d031935210d')
+
+ # variant('bash-completion', default=False, description='Build with bash-completion support for npm') # NOQA: ignore=E501
+ variant('debug', default=False, description='Include debugger support')
+ variant('doc', default=False, description='Compile with documentation')
+ variant('icu4c', default=False, description='Build with support for all locales instead of just English')
+    variant('openssl', default=True, description="Build with Spack's OpenSSL instead of the bundled version")
+    variant('zlib', default=True, description="Build with Spack's zlib instead of the bundled version")
+
+ depends_on('libtool', type='build', when=sys.platform != 'darwin')
+ depends_on('pkg-config', type='build')
+ depends_on('python@2.7:2.7.999', type='build')
+ # depends_on('bash-completion', when="+bash-completion")
+ depends_on('icu4c', when='+icu4c')
+ depends_on('openssl', when='+openssl')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(['--prefix={0}'.format(prefix)])
+
+ # Note: npm is updated more regularly than node.js, so we build the
+ # package instead of using the bundled version
+ options.extend(['--without-npm'])
+
+        # On OSX the system libtool must be used, so we check that the
+        # libtool found in the PATH is indeed the system one:
+ if sys.platform == 'darwin':
+ process_pipe = subprocess.Popen(["which", "libtool"],
+ stdout=subprocess.PIPE)
+ result_which = process_pipe.communicate()[0]
+ process_pipe = subprocess.Popen(["whereis", "libtool"],
+ stdout=subprocess.PIPE)
+ result_whereis = process_pipe.communicate()[0]
+            assert result_which == result_whereis, (
+                'On OSX the system libtool must be used. Please '
+                '(temporarily) remove %s or its link to libtool from '
+                'the PATH' % result_which)
+
+ # TODO: Add bash-completion
+
+ if '+debug' in spec:
+ options.extend(['--debug'])
+
+ if '+openssl' in spec:
+ options.extend([
+ '--shared-openssl',
+ '--shared-openssl-includes=%s' % spec['openssl'].prefix.include, # NOQA: ignore=E501
+ '--shared-openssl-libpath=%s' % spec['openssl'].prefix.lib,
+ ])
+
+ if '+zlib' in spec:
+ options.extend([
+ '--shared-zlib',
+ '--shared-zlib-includes=%s' % spec['zlib'].prefix.include,
+ '--shared-zlib-libpath=%s' % spec['zlib'].prefix.lib,
+ ])
+
+ if '+icu4c' in spec:
+ options.extend(['--with-intl=full-icu'])
+ # else:
+ # options.extend(['--with-intl=system-icu'])
+
+ configure(*options)
+
+ if self.run_tests:
+ make('test')
+ make('test-addons')
+
+ if '+doc' in spec:
+ make('doc')
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/npm/package.py b/var/spack/repos/builtin/packages/npm/package.py
new file mode 100644
index 0000000000..7910a8af9b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/npm/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import os
+from spack import *
+
+
+class Npm(Package):
+    """npm: A package manager for JavaScript."""
+
+ homepage = "https://github.com/npm/npm"
+ # base http://www.npmjs.com/
+ url = "https://registry.npmjs.org/npm/-/npm-3.10.5.tgz"
+
+ version('3.10.9', 'ec1eb22b466ce87cdd0b90182acce07f')
+ version('3.10.5', '46002413f4a71de9b0da5b506bf1d992')
+
+ depends_on('node-js')
+
+ def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ npm_config_cache_dir = "%s/npm-cache" % dependent_spec.prefix
+ if not os.path.isdir(npm_config_cache_dir):
+ mkdir(npm_config_cache_dir)
+ run_env.set('npm_config_cache', npm_config_cache_dir)
+ spack_env.set('npm_config_cache', npm_config_cache_dir)
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ if self.run_tests:
+ make('test')
+
+ make('install')
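
Note: setup_dependent_environment() above points npm_config_cache into the
dependent's own prefix, so npm-based builds do not write into ~/.npm. A
sketch of a hypothetical dependent package follows; the package name and
npm invocation are illustrative only, and homepage/url/version are omitted.

    from spack import *

    class MyNodeTool(Package):
        """Hypothetical npm-based package."""

        depends_on('npm', type='build')

        def install(self, spec, prefix):
            # npm_config_cache already points to <prefix>/npm-cache here.
            npm = which('npm')
            npm('install', '--prefix', prefix)
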
diff --git a/var/spack/repos/builtin/packages/numdiff/package.py b/var/spack/repos/builtin/packages/numdiff/package.py
index e15d60cb0b..0a912d3db9 100644
--- a/var/spack/repos/builtin/packages/numdiff/package.py
+++ b/var/spack/repos/builtin/packages/numdiff/package.py
@@ -25,7 +25,8 @@
from spack import *
import sys
-class Numdiff(Package):
+
+class Numdiff(AutotoolsPackage):
"""Numdiff is a little program that can be used to compare putatively
similar files line by line and field by field, ignoring small numeric
differences or/and different numeric formats."""
@@ -35,10 +36,4 @@ class Numdiff(Package):
version('5.8.1', 'a295eb391f6cb1578209fc6b4f9d994e')
- depends_on('gettext', sys.platform=='darwin')
-
- def install(self, spec, prefix):
- options = ['--prefix=%s' % prefix]
- configure(*options)
- make()
- make('install')
+ depends_on('gettext', when=sys.platform == 'darwin')
diff --git a/var/spack/repos/builtin/packages/nwchem/package.py b/var/spack/repos/builtin/packages/nwchem/package.py
new file mode 100644
index 0000000000..556dba34c2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/nwchem/package.py
@@ -0,0 +1,170 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import sys
+import os
+
+
+class Nwchem(Package):
+ """High-performance computational chemistry software"""
+
+ homepage = "http://www.nwchem-sw.org"
+ url = "http://www.nwchem-sw.org/images/Nwchem-6.6.revision27746-src.2015-10-20.tar.gz"
+
+ version('6.6', 'c581001c004ea5e5dfacb783385825e3',
+ url='http://www.nwchem-sw.org/images/Nwchem-6.6.revision27746-src.2015-10-20.tar.gz')
+
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('mpi')
+ depends_on('scalapack')
+
+ depends_on('python@2.7:2.8', type=('build', 'run'))
+
+ # patches for 6.6-27746:
+ urls_for_patches = {
+ '@6.6': [
+ ('http://www.nwchem-sw.org/images/Tddft_mxvec20.patch.gz', 'f91c6a04df56e228fe946291d2f38c9a'),
+ ('http://www.nwchem-sw.org/images/Tools_lib64.patch.gz', 'b71e8dbad27f1c97b60a53ec34d3f6e0'),
+ ('http://www.nwchem-sw.org/images/Config_libs66.patch.gz', 'cc4be792e7b5128c3f9b7b1167ade2cf'),
+ ('http://www.nwchem-sw.org/images/Cosmo_meminit.patch.gz', '1d94685bf3b72d8ecd40c46334348ca7'),
+ ('http://www.nwchem-sw.org/images/Sym_abelian.patch.gz', 'b19cade61c787916a73a4aaf6e2445d6'),
+ ('http://www.nwchem-sw.org/images/Xccvs98.patch.gz', 'b9aecc516a3551dcf871cb2f066598cb'),
+ ('http://www.nwchem-sw.org/images/Dplot_tolrho.patch.gz', '0a5bdad63d2d0ffe46b28db7ad6d9cec'),
+ ('http://www.nwchem-sw.org/images/Driver_smalleig.patch.gz', 'c3f609947220c0adb524b02c316b5564'),
+ ('http://www.nwchem-sw.org/images/Ga_argv.patch.gz', '7a665c981cfc17187455e1826f095f6f'),
+ ('http://www.nwchem-sw.org/images/Raman_displ.patch.gz', 'ed334ca0b2fe81ce103ef8cada990c4c'),
+ ('http://www.nwchem-sw.org/images/Ga_defs.patch.gz', '0c3cab4d5cbef5acac16ffc5e6f869ef'),
+ ('http://www.nwchem-sw.org/images/Zgesvd.patch.gz', '8fd5a11622968ef4351bd3d5cddce8f2'),
+ ('http://www.nwchem-sw.org/images/Cosmo_dftprint.patch.gz', '64dcf27f3c6ced2cadfb504fa66e9d08'),
+ ('http://www.nwchem-sw.org/images/Txs_gcc6.patch.gz', '56595a7252da051da13f94edc54fe059'),
+ ('http://www.nwchem-sw.org/images/Gcc6_optfix.patch.gz', 'c6642c21363c09223784b47b8636047d'),
+ ('http://www.nwchem-sw.org/images/Util_gnumakefile.patch.gz', 'af74ea2e32088030137001ce5cb047c5'),
+ ('http://www.nwchem-sw.org/images/Util_getppn.patch.gz', '8dec8ee198bf5ec4c3a22a6dbf31683c'),
+ ('http://www.nwchem-sw.org/images/Gcc6_macs_optfix.patch.gz', 'a891a2713aac8b0423c8096461c243eb'),
+ ('http://www.nwchem-sw.org/images/Notdir_fc.patch.gz', '2dc997d4ab3719ac7964201adbc6fd79')
+ ]
+ }
+ # Iterate over patches
+ for condition, urls in urls_for_patches.iteritems():
+ for url, md5 in urls:
+ patch(url, when=condition, level=0, md5=md5)
+
+ def install(self, spec, prefix):
+ scalapack = spec['scalapack'].scalapack_libs
+ lapack = spec['lapack'].lapack_libs
+ blas = spec['blas'].blas_libs
+ # see http://www.nwchem-sw.org/index.php/Compiling_NWChem
+ args = []
+ args.extend([
+ 'NWCHEM_TOP=%s' % self.stage.source_path,
+            # NWCHEM is picky about FC and CC. They should NOT be full paths.
+ # see http://www.nwchem-sw.org/index.php/Special:AWCforum/sp/id7524
+ 'CC=%s' % os.path.basename(spack_cc),
+ 'FC=%s' % os.path.basename(spack_fc),
+ 'USE_MPI=y',
+ 'MPI_LOC=%s' % spec['mpi'].prefix,
+ 'USE_PYTHONCONFIG=y',
+ 'PYTHONVERSION=%s' % spec['python'].version.up_to(2),
+ 'PYTHONHOME=%s' % spec['python'].prefix,
+ 'BLASOPT=%s' % ((lapack + blas).ld_flags),
+ 'BLAS_LIB=%s' % blas.ld_flags,
+ 'LAPACK_LIB=%s' % lapack.ld_flags,
+ 'USE_SCALAPACK=y',
+ 'SCALAPACK=%s' % scalapack.ld_flags,
+ 'NWCHEM_MODULES=all python',
+ 'NWCHEM_LONG_PATHS=Y' # by default NWCHEM_TOP is 64 char max
+ ])
+
+        # TODO: query whether blas/lapack/scalapack use 64-bit integers
+        # A flag to distinguish between 32-bit and 64-bit integers in the
+        # linear algebra libraries (BLAS, LAPACK, ScaLAPACK)
+ use32bitLinAlg = True
+
+ if use32bitLinAlg:
+ args.extend([
+ 'USE_64TO32=y',
+ 'BLAS_SIZE=4',
+ 'LAPACK_SIZE=4',
+ 'SCALAPACK_SIZE=4'
+ ])
+ else:
+ args.extend([
+ 'BLAS_SIZE=8',
+                'LAPACK_SIZE=8',
+ 'SCALAPACK_SIZE=8'
+ ])
+
+ if sys.platform == 'darwin':
+ target = 'MACX64'
+ args.extend([
+ 'CFLAGS_FORGA=-DMPICH_NO_ATTR_TYPE_TAGS'
+ ])
+ else:
+ target = 'LINUX64'
+
+ args.extend(['NWCHEM_TARGET=%s' % target])
+
+ with working_dir('src'):
+ make('nwchem_config', *args)
+ if use32bitLinAlg:
+ make('64_to_32', *args)
+ make(*args)
+
+ # need to install by hand. Follow Ubuntu:
+ # http://packages.ubuntu.com/trusty/all/nwchem-data/filelist
+ # http://packages.ubuntu.com/trusty/amd64/nwchem/filelist
+ share_path = join_path(prefix, 'share', 'nwchem')
+ mkdirp(prefix.bin)
+
+ install_tree('data', share_path)
+ install_tree(join_path('basis', 'libraries'),
+ join_path(share_path, 'libraries'))
+ install_tree(join_path('nwpw', 'libraryps'),
+ join_path(share_path, 'libraryps'))
+
+ b_path = join_path(self.stage.source_path, 'bin',
+ target, 'nwchem')
+ chmod = which('chmod')
+ chmod('+x', b_path)
+ install(b_path, prefix.bin)
+
+ # Finally, make user's life easier by creating a .nwchemrc file
+ # to point to the required data files.
+ nwchemrc = """\
+ nwchem_basis_library {data}/libraries/
+ nwchem_nwpw_library {data}/libraryps/
+ ffield amber
+ amber_1 {data}/amber_s/
+ amber_2 {data}/amber_q/
+ amber_3 {data}/amber_x/
+ amber_4 {data}/amber_u/
+ spce {data}/solvents/spce.rst
+ charmm_s {data}/charmm_s/
+ charmm_x {data}/charmm_x/
+""".format(data=share_path)
+ with open(".nwchemrc", 'w') as f:
+ f.write(nwchemrc)
+ install(".nwchemrc", share_path)
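
Note on the TODO above: once the BLAS/LAPACK providers expose an integer-size
variant, the hard-coded flag could be derived from the spec instead. The
'ilp64' variant name below is an assumption, not something this patch adds:

        # Hypothetical: derive the integer size from the blas provider.
        use32bitLinAlg = '+ilp64' not in spec['blas']
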
diff --git a/var/spack/repos/builtin/packages/ocaml/package.py b/var/spack/repos/builtin/packages/ocaml/package.py
new file mode 100644
index 0000000000..9488d3b7a6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/ocaml/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Ocaml(Package):
+ """OCaml is an industrial strength programming language supporting
+ functional, imperative and object-oriented styles"""
+
+ homepage = "http://ocaml.org/"
+ url = "http://caml.inria.fr/pub/distrib/ocaml-4.03/ocaml-4.03.0.tar.gz"
+
+ version('4.03.0', '43812739ea1b4641cf480f57f977c149')
+
+ depends_on('ncurses')
+
+ def install(self, spec, prefix):
+ configure('-prefix', '{0}'.format(prefix))
+
+ make('world.opt')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/oce/package.py b/var/spack/repos/builtin/packages/oce/package.py
index 0f0fcfb733..2d518d83c3 100644
--- a/var/spack/repos/builtin/packages/oce/package.py
+++ b/var/spack/repos/builtin/packages/oce/package.py
@@ -23,24 +23,27 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import platform, sys
+import platform
+
class Oce(Package):
- """
- Open CASCADE Community Edition:
- patches/improvements/experiments contributed by users over the official Open CASCADE library.
+ """Open CASCADE Community Edition:
+ patches/improvements/experiments contributed by users over the official
+ Open CASCADE library.
"""
homepage = "https://github.com/tpaviot/oce"
url = "https://github.com/tpaviot/oce/archive/OCE-0.17.tar.gz"
+ version('0.17.2', 'bf2226be4cd192606af677cf178088e5')
version('0.17.1', '36c67b87093c675698b483454258af91')
- version('0.17' , 'f1a89395c4b0d199bea3db62b85f818d')
+ version('0.17', 'f1a89395c4b0d199bea3db62b85f818d')
version('0.16.1', '4d591b240c9293e879f50d86a0cb2bb3')
- version('0.16' , '7a4b4df5a104d75a537e25e7dd387eca')
+ version('0.16', '7a4b4df5a104d75a537e25e7dd387eca')
- variant('tbb', default=True, description='Build with Intel Threading Building Blocks')
+ variant('tbb', default=True,
+ description='Build with Intel Threading Building Blocks')
- depends_on('cmake@2.8:')
+ depends_on('cmake@2.8:', type='build')
depends_on('tbb', when='+tbb')
# There is a bug in OCE which appears with Clang (version?) or GCC 6.0
@@ -49,8 +52,12 @@ class Oce(Package):
# http://tracker.dev.opencascade.org/view.php?id=26042
# https://github.com/tpaviot/oce/issues/605
# https://github.com/tpaviot/oce/commit/61cb965b9ffeca419005bc15e635e67589c421dd.patch
- patch('null.patch',when='@0.16:0.17.1')
+ patch('null.patch', when='@0.16:0.17.1')
+ # fix build with Xcode 8 "previous definition of CLOCK_REALTIME"
+ # reported 27 Sep 2016 https://github.com/tpaviot/oce/issues/643
+ if (platform.system() == "Darwin") and (platform.mac_ver()[0] == '10.12'):
+ patch('sierra.patch')
def install(self, spec, prefix):
options = []
@@ -63,7 +70,8 @@ class Oce(Package):
'-DOCE_DISABLE_X11:BOOL=ON',
'-DOCE_DRAW:BOOL=OFF',
'-DOCE_MODEL:BOOL=ON',
- '-DOCE_MULTITHREAD_LIBRARY:STRING=%s' % ('TBB' if '+tbb' in spec else 'NONE'),
+ '-DOCE_MULTITHREAD_LIBRARY:STRING=%s' % (
+ 'TBB' if '+tbb' in spec else 'NONE'),
'-DOCE_OCAF:BOOL=ON',
'-DOCE_USE_TCL_TEST_FRAMEWORK:BOOL=OFF',
'-DOCE_VISUALISATION:BOOL=OFF',
@@ -77,15 +85,8 @@ class Oce(Package):
'-DOCE_OSX_USE_COCOA:BOOL=ON',
])
- cmake('.', *options)
+ options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
+ cmake('.', *options)
make("install/strip")
-
- # OCE tests build is brocken at least on Darwin.
- # Unit tests are linked against libTKernel.10.dylib isntead of /full/path/libTKernel.10.dylib
- # see https://github.com/tpaviot/oce/issues/612
- # make("test")
-
- # The shared libraries are not installed correctly on Darwin; correct this
- if (sys.platform == 'darwin'):
- fix_darwin_install_name(prefix.lib)
+ make("test")
diff --git a/var/spack/repos/builtin/packages/oce/sierra.patch b/var/spack/repos/builtin/packages/oce/sierra.patch
new file mode 100644
index 0000000000..8db3af5e37
--- /dev/null
+++ b/var/spack/repos/builtin/packages/oce/sierra.patch
@@ -0,0 +1,45 @@
+diff --git a/adm/cmake/TKernel/CMakeLists.txt b/adm/cmake/TKernel/CMakeLists.txt
+index 54e241e..72873c6 100644
+--- a/adm/cmake/TKernel/CMakeLists.txt
++++ b/adm/cmake/TKernel/CMakeLists.txt
+@@ -29,7 +29,7 @@ if(WIN32)
+ set(TOOLKIT_LIBS ${TOOLKIT_LIBS} ${CSF_SOCKETLibs_LIB} ${CSF_advapi32_LIB} ${CSF_gdi32_LIB} ${CSF_user32_LIB} ${CSF_kernel32_LIB} ${CSF_psapi_LIB})
+ else(WIN32)
+ #  An implementation for Mac OS X has been added in src/OSD/gettime_osx.h
+- if(NOT APPLE)
++ if(APPLE)
+ include( CheckFunctionExists )
+ check_function_exists( clock_gettime CLOCK_GETTIME_IN_LIBC )
+ if(NOT CLOCK_GETTIME_IN_LIBC)
+@@ -40,7 +40,7 @@ else(WIN32)
+ endif(NOT CLOCK_GETTIME_IN_LIBRT)
+ set(TOOLKIT_LIBS ${TOOLKIT_LIBS} rt)
+ endif(NOT CLOCK_GETTIME_IN_LIBC)
+- endif(NOT APPLE)
++ endif(APPLE)
+ endif(WIN32)
+
+ # Adde-DHAVE_TBB in TKernel in order to benefit from Standard_MMgrTBBalloc
+@@ -70,4 +70,3 @@ if (OCE_BUILD_TKERNEL_WITH_INSTALL_RPATH)
+ set_target_properties_install_rpath(${TOOLKIT} "${OCE_INSTALL_LIB_DIR_RPATH};${OCE_INSTALL_PACKAGE_LIB_DIR_RPATH}")
+ set_target_properties(${TOOLKIT} PROPERTIES BUILD_WITH_INSTALL_RPATH ON)
+ endif ()
+-
+diff --git a/src/OSD/OSD_Chronometer.cxx b/src/OSD/OSD_Chronometer.cxx
+index 444f844..25970d0 100644
+--- a/src/OSD/OSD_Chronometer.cxx
++++ b/src/OSD/OSD_Chronometer.cxx
+@@ -51,7 +51,7 @@
+ #include <mach/mach.h>
+ #endif
+
+-#if defined(__APPLE__) && defined(__MACH__)
++#if defined(__OOOOPPLE__) && defined(__MACH__)
+ #include "gettime_osx.h"
+ #endif
+
+@@ -283,4 +283,3 @@ void OSD_Chronometer::Show (Standard_Real& user,
+ system = Cumul_sys;
+ if (!StopSav) Start();
+ }
+-
diff --git a/var/spack/repos/builtin/packages/oclock/package.py b/var/spack/repos/builtin/packages/oclock/package.py
new file mode 100644
index 0000000000..84da93c36e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/oclock/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Oclock(Package):
+ """oclock is a simple analog clock using the SHAPE extension to make
+ a round (possibly transparent) window."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/oclock"
+ url = "https://www.x.org/archive/individual/app/oclock-1.0.3.tar.gz"
+
+ version('1.0.3', 'f25b05d987ef8ed6dd5a887c82eace62')
+
+ depends_on('libx11')
+ depends_on('libxmu')
+ depends_on('libxext')
+ depends_on('libxt')
+ depends_on('libxkbfile')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/octave-splines/package.py b/var/spack/repos/builtin/packages/octave-splines/package.py
new file mode 100644
index 0000000000..11c9cc5ba7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/octave-splines/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class OctaveSplines(Package):
+ """Additional spline functions."""
+
+ homepage = "http://octave.sourceforge.net/splines/index.html"
+ url = "http://downloads.sourceforge.net/octave/splines-1.3.1.tar.gz"
+
+ version('1.3.1', 'f9665d780c37aa6a6e17d1f424c49bdeedb89d1192319a4e39c08784122d18f9')
+
+ extends('octave@3.6.0:')
+
+ def install(self, spec, prefix):
+ octave('--quiet',
+ '--norc',
+ '--built-in-docstrings-file=/dev/null',
+ '--texi-macros-file=/dev/null',
+ '--eval', 'pkg prefix %s; pkg install %s' %
+ (prefix, self.stage.archive_file))
diff --git a/var/spack/repos/builtin/packages/octave/package.py b/var/spack/repos/builtin/packages/octave/package.py
index 17c7ff82f4..df144c8bbb 100644
--- a/var/spack/repos/builtin/packages/octave/package.py
+++ b/var/spack/repos/builtin/packages/octave/package.py
@@ -23,6 +23,8 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import sys
+
class Octave(Package):
"""GNU Octave is a high-level language, primarily intended for numerical
@@ -34,7 +36,10 @@ class Octave(Package):
homepage = "https://www.gnu.org/software/octave/"
url = "ftp://ftp.gnu.org/gnu/octave/octave-4.0.0.tar.gz"
- version('4.0.0' , 'a69f8320a4f20a8480c1b278b1adb799')
+ extendable = True
+
+ version('4.0.2', 'c2a5cacc6e4c52f924739cdf22c2c687')
+ version('4.0.0', 'a69f8320a4f20a8480c1b278b1adb799')
# Variants
variant('readline', default=True)
@@ -56,39 +61,41 @@ class Octave(Package):
variant('qrupdate', default=False)
variant('qscintilla', default=False)
variant('qt', default=False)
- variant('suiteparse', default=False)
+ variant('suitesparse', default=False)
variant('zlib', default=False)
# Required dependencies
depends_on('blas')
depends_on('lapack')
+ # Octave does not configure with sed from darwin:
+ depends_on('sed', when=sys.platform == 'darwin', type='build')
depends_on('pcre')
+ depends_on('pkg-config', type='build')
# Strongly recommended dependencies
- depends_on('readline', when='+readline')
+ depends_on('readline', when='+readline')
# Optional dependencies
- depends_on('arpack', when='+arpack')
- depends_on('curl', when='+curl')
- depends_on('fftw', when='+fftw')
- depends_on('fltk', when='+fltk')
- depends_on('fontconfig', when='+fontconfig')
- depends_on('freetype', when='+freetype')
- depends_on('glpk', when='+glpk')
- depends_on('gl2ps', when='+gl2ps')
- depends_on('gnuplot', when='+gnuplot')
- depends_on('ImageMagick', when='+magick')
- depends_on('hdf5', when='+hdf5')
- depends_on('jdk', when='+jdk')
- depends_on('llvm', when='+llvm')
- #depends_on('opengl', when='+opengl') # TODO: add package
- depends_on('qhull', when='+qhull')
- depends_on('qrupdate', when='+qrupdate')
- #depends_on('qscintilla', when='+qscintilla) # TODO: add package
- depends_on('qt', when='+qt')
- depends_on('suite-sparse',when='+suitesparse')
- depends_on('zlib', when='+zlib')
-
+ depends_on('arpack', when='+arpack')
+ depends_on('curl', when='+curl')
+ depends_on('fftw', when='+fftw')
+ depends_on('fltk', when='+fltk')
+ depends_on('fontconfig', when='+fontconfig')
+ depends_on('freetype', when='+freetype')
+ depends_on('glpk', when='+glpk')
+ depends_on('gl2ps', when='+gl2ps')
+ depends_on('gnuplot', when='+gnuplot')
+ depends_on('image-magick', when='+magick')
+ depends_on('hdf5', when='+hdf5')
+ depends_on('jdk', when='+jdk')
+ depends_on('llvm', when='+llvm')
+ # depends_on('opengl', when='+opengl') # TODO: add package
+ depends_on('qhull', when='+qhull')
+ depends_on('qrupdate', when='+qrupdate')
+ # depends_on('qscintilla', when='+qscintilla) # TODO: add package
+ depends_on('qt', when='+qt')
+ depends_on('suite-sparse', when='+suitesparse')
+ depends_on('zlib', when='+zlib')
def install(self, spec, prefix):
config_args = [
@@ -154,7 +161,8 @@ class Octave(Package):
config_args.append("--without-glpk")
if '+magick' in spec:
- config_args.append("--with-magick=%s" % spec['ImageMagick'].prefix.lib)
+ config_args.append("--with-magick=%s"
+ % spec['image-magick'].prefix.lib)
if '+hdf5' in spec:
config_args.extend([
@@ -187,7 +195,8 @@ class Octave(Package):
if '+qrupdate' in spec:
config_args.extend([
- "--with-qrupdate-includedir=%s" % spec['qrupdate'].prefix.include,
+ "--with-qrupdate-includedir=%s"
+ % spec['qrupdate'].prefix.include,
"--with-qrupdate-libdir=%s" % spec['qrupdate'].prefix.lib
])
else:
@@ -205,3 +214,16 @@ class Octave(Package):
make()
make("install")
+
+ # ========================================================================
+ # Set up environment to make install easy for Octave extensions.
+ # ========================================================================
+
+ def setup_dependent_package(self, module, ext_spec):
+ """Called before Octave modules' install() methods.
+
+ In most cases, extensions will only need to have one line:
+ octave('--eval', 'pkg install %s' % self.stage.archive_file)
+ """
+ # Octave extension builds can have a global Octave executable function
+ module.octave = Executable(join_path(self.spec.prefix.bin, 'octave'))
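
Note: setup_dependent_package() above injects a global octave callable into
extension builds; the octave-splines package added earlier in this patch
relies on it. A condensed sketch of an extension's install() (class name is
illustrative; homepage/url/version omitted for brevity):

    class OctaveFoo(Package):
        """Hypothetical Octave extension."""

        extends('octave')

        def install(self, spec, prefix):
            # 'octave' is provided by Octave's setup_dependent_package()
            octave('--eval', 'pkg prefix %s; pkg install %s'
                   % (prefix, self.stage.archive_file))
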
diff --git a/var/spack/repos/builtin/packages/octopus/package.py b/var/spack/repos/builtin/packages/octopus/package.py
new file mode 100644
index 0000000000..b74befbe35
--- /dev/null
+++ b/var/spack/repos/builtin/packages/octopus/package.py
@@ -0,0 +1,134 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Octopus(Package):
+ """A real-space finite-difference (time-dependent) density-functional
+ theory code."""
+
+ homepage = "http://www.tddft.org/programs/octopus/"
+ base_url = "http://www.tddft.org/programs/octopus/down.php?file="
+
+ version('6.0', '5d1168c2a8d7fd9cb9492eaebaa7182e')
+ version('5.0.1', '2b6392ab67b843f9d4ca7413fc07e822')
+
+ # Sample url is:
+ # "http://www.tddft.org/programs/octopus/down.php?file=5.0.1/octopus-5.0.1.tar.gz"
+ def url_for_version(self, version):
+ return '{0}/{1}/octopus-{1}.tar.gz'.format(Octopus.base_url,
+ version.dotted)
+
+ variant('scalapack', default=False,
+ description='Compile with Scalapack')
+ variant('metis', default=True,
+ description='Compile with METIS')
+ variant('parmetis', default=False,
+ description='Compile with ParMETIS')
+ variant('netcdf', default=False,
+ description='Compile with Netcdf')
+ variant('arpack-ng', default=False,
+ description='Compile with ARPACK-ng')
+
+ depends_on('blas')
+ depends_on('gsl')
+ depends_on('lapack')
+ depends_on('libxc')
+ depends_on('mpi')
+ depends_on('fftw+mpi')
+ depends_on('metis@5:', when='+metis')
+ depends_on('parmetis', when='+parmetis')
+ depends_on('scalapack', when='+scalapack')
+ depends_on('netcdf-fortran', when='+netcdf')
+ depends_on('arpack-ng', when='+arpack-ng')
+
+ # optional dependencies:
+ # TODO: parmetis, etsf-io, sparskit,
+ # feast, libfm, pfft, isf, pnfft
+
+ def install(self, spec, prefix):
+ arpack = find_libraries(['libarpack'], root=spec[
+ 'arpack-ng'].prefix.lib, shared=True)
+ lapack = spec['lapack'].lapack_libs
+ blas = spec['blas'].blas_libs
+ args = []
+ args.extend([
+ '--prefix=%s' % prefix,
+ '--with-blas=%s' % blas.ld_flags,
+ '--with-lapack=%s' % lapack.ld_flags,
+ '--with-gsl-prefix=%s' % spec['gsl'].prefix,
+ '--with-libxc-prefix=%s' % spec['libxc'].prefix,
+ 'CC=%s' % spec['mpi'].mpicc,
+ 'FC=%s' % spec['mpi'].mpifc,
+ '--enable-mpi',
+ '--with-fft-lib=-L%s -lfftw3' % spec['fftw'].prefix.lib,
+ ])
+ if '+metis' in spec:
+ args.extend([
+ '--with-metis-prefix=%s' % spec['metis'].prefix,
+ ])
+ if '+parmetis' in spec:
+ args.extend([
+ '--with-parmetis-prefix=%s' % spec['parmetis'].prefix,
+ ])
+ if '+netcdf' in spec:
+ args.extend([
+ '--with-netcdf-prefix=%s' % spec['netcdf-fortran'].prefix,
+ '--with-netcdf-include=%s' %
+ spec['netcdf-fortran'].prefix.include,
+ ])
+ if '+arpack-ng' in spec:
+ args.extend([
+ '--with-arpack={0}'.format(arpack.joined()),
+ ])
+ if '+scalapack' in spec:
+ args.extend([
+ '--with-blacs=%s' % spec['scalapack'].scalapack_libs,
+ '--with-scalapack=%s' % spec['scalapack'].scalapack_libs,
+ ])
+ # --with-etsf-io-prefix=
+ # --with-sparskit=${prefix}/lib/libskit.a
+ # --with-pfft-prefix=${prefix} --with-mpifftw-prefix=${prefix}
+ # --with-parpack=${prefix}/lib/libparpack.dylib
+ # --with-parmetis-prefix=${prefix}
+ # --with-berkeleygw-prefix=${prefix}
+
+        # When the preprocessor expands macros (i.e. CFLAGS) defined as
+        # quoted strings, the result may exceed 132 characters and get
+        # truncated. The compiler then sees an unterminated character
+        # constant and reports "line truncated" errors. To overcome this,
+        # add flags to let the compiler know that the entire line is
+        # meaningful.
+        # TODO: for lack of a better approach, assume that clang is mixed
+        # with GNU Fortran.
+ if spec.satisfies('%clang') or spec.satisfies('%gcc'):
+ args.extend([
+ 'FCFLAGS=-O2 -ffree-line-length-none'
+ ])
+
+ configure(*args)
+ make()
+ # short tests take forever...
+ # make('check-short')
+ make('install')
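
Note: the install() above leans on Spack's LibraryList helpers. Roughly, and
with invented paths, the accessors render as follows:

    libs = find_libraries(['liblapack'], root='/opt/spack/lapack',
                          shared=True, recurse=True)
    libs.ld_flags     # e.g. '-L/opt/spack/lapack/lib -llapack'
    libs.joined()     # e.g. '/opt/spack/lapack/lib/liblapack.so'
    libs.joined(';')  # same paths, ';'-separated when several libs are found
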
diff --git a/var/spack/repos/builtin/packages/ompss/package.py b/var/spack/repos/builtin/packages/ompss/package.py
index c0848ffd70..02925974ea 100644
--- a/var/spack/repos/builtin/packages/ompss/package.py
+++ b/var/spack/repos/builtin/packages/ompss/package.py
@@ -26,19 +26,18 @@ from spack import *
import os
import glob
-# working config lines for ompss 14.06 :
-#./nanox-0.7/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-mcc=/usr/gapps/exmatex/ompss/ --with-hwloc=/usr
-#./mcxx-1.99.2/config.log: $ ./configure --prefix=/usr/gapps/exmatex/ompss --with-nanox=/usr/gapps/exmatex/ompss --enable-ompss --with-mpi=/opt/mvapich2-intel-shmem-1.7 --enable-tl-openmp-profile --enable-tl-openmp-intel
class Ompss(Package):
- """OmpSs is an effort to integrate features from the StarSs
- programming model developed by BSC into a single programming
- model. In particular, our objective is to extend OpenMP with
- new directives to support asynchronous parallelism and
- heterogeneity (devices like GPUs). However, it can also be
- understood as new directives extending other accelerator based
- APIs like CUDA or OpenCL. Our OmpSs environment is built on top
- of our Mercurium compiler and Nanos++ runtime system."""
+ """OmpSs is an effort to integrate features from the StarSs programming
+ model developed by BSC into a single programming model. In
+ particular, our objective is to extend OpenMP with new directives
+ to support asynchronous parallelism and heterogeneity (devices
+ like GPUs). However, it can also be understood as new directives
+ extending other accelerator based APIs like CUDA or OpenCL. Our
+ OmpSs environment is built on top of our Mercurium compiler and
+ Nanos++ runtime system.
+
+ """
homepage = "http://pm.bsc.es/"
url = "http://pm.bsc.es/sites/default/files/ftp/ompss/releases/ompss-14.10.tar.gz"
list_url = 'http://pm.bsc.es/ompss-downloads'
@@ -47,7 +46,7 @@ class Ompss(Package):
# all dependencies are optional, really
depends_on("mpi")
- #depends_on("openmp")
+ # depends_on("openmp")
depends_on("hwloc")
depends_on("extrae")
@@ -61,14 +60,22 @@ class Ompss(Package):
openmp_options = ["--enable-tl-openmp-profile"]
if spec.satisfies('%intel'):
- openmp_options.append( "--enable-tl-openmp-intel" )
+ openmp_options.append("--enable-tl-openmp-intel")
os.chdir(glob.glob('./nanox-*').pop())
- configure("--prefix=%s" % prefix, "--with-mcc=%s" % prefix, "--with-extrae=%s" % spec['extrae'].prefix, "--with-hwloc=%s" % spec['hwloc'].prefix)
+ configure("--prefix=%s" % prefix,
+ "--with-mcc=%s" % prefix,
+ "--with-extrae=%s" %
+ spec['extrae'].prefix,
+ "--with-hwloc=%s" % spec['hwloc'].prefix)
make()
make("install")
os.chdir(glob.glob('../mcxx-*').pop())
- configure("--prefix=%s" % prefix, "--with-nanox=%s" % prefix, "--enable-ompss", "--with-mpi=%s" % mpi.prefix, *openmp_options)
+ configure("--prefix=%s" % prefix,
+ "--with-nanox=%s" % prefix,
+ "--enable-ompss",
+ "--with-mpi=%s" % mpi.prefix,
+ *openmp_options)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/ompt-openmp/package.py b/var/spack/repos/builtin/packages/ompt-openmp/package.py
index e74dcf6c23..40159e4c6c 100644
--- a/var/spack/repos/builtin/packages/ompt-openmp/package.py
+++ b/var/spack/repos/builtin/packages/ompt-openmp/package.py
@@ -24,24 +24,26 @@
##############################################################################
from spack import *
+
class OmptOpenmp(Package):
- """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP mirror. This library provides a drop-in replacement of the OpenMP runtimes for GCC, Intel and LLVM/Clang."""
+ """LLVM/Clang OpenMP runtime with OMPT support. This is a fork of the
+ OpenMPToolsInterface/LLVM-openmp fork of the official LLVM OpenMP
+ mirror. This library provides a drop-in replacement of the OpenMP
+ runtimes for GCC, Intel and LLVM/Clang.
+
+ """
homepage = "https://github.com/OpenMPToolsInterface/LLVM-openmp"
url = "http://github.com/khuck/LLVM-openmp/archive/v0.1.tar.gz"
version('0.1', '2334e6a84b52da41b27afd9831ed5370')
- # depends_on("foo")
+ depends_on('cmake', type='build')
def install(self, spec, prefix):
with working_dir("runtime/build", create=True):
-
- # FIXME: Modify the configure line to suit your build system here.
- cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc,
+ cmake('-DCMAKE_C_COMPILER=%s' % self.compiler.cc,
'-DCMAKE_CXX_COMPILER=%s' % self.compiler.cxx,
'-DCMAKE_INSTALL_PREFIX=%s' % prefix,
'..', *std_cmake_args)
-
- # FIXME: Add logic to build and install here
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/opari2/package.py b/var/spack/repos/builtin/packages/opari2/package.py
index 510fff9fc2..20c67716a4 100644
--- a/var/spack/repos/builtin/packages/opari2/package.py
+++ b/var/spack/repos/builtin/packages/opari2/package.py
@@ -22,21 +22,25 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
+
class Opari2(Package):
- """
- OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid codes. It surrounds OpenMP directives and
- runtime library calls with calls to the POMP2 measurement interface. OPARI2 will provide you with a new
- initialization method that allows for multi-directory and parallel builds as well as the usage of pre-instrumented
- libraries. Furthermore, an efficient way of tracking parent-child relationships was added. Additionally, we extended
- OPARI2 to support instrumentation of OpenMP 3.0 tied tasks.
+ """OPARI2 is a source-to-source instrumentation tool for OpenMP and hybrid
+ codes. It surrounds OpenMP directives and runtime library calls with calls
+ to the POMP2 measurement interface. OPARI2 will provide you with a new
+ initialization method that allows for multi-directory and parallel builds
+ as well as the usage of pre-instrumented libraries. Furthermore, an
+ efficient way of tracking parent-child relationships was added.
+ Additionally, we extended OPARI2 to support instrumentation of OpenMP 3.0
+ tied tasks.
"""
homepage = "http://www.vi-hps.org/projects/score-p"
url = "http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.2.tar.gz"
+ version('2.0', '72350dbdb6139f2e68a5055a4f0ba16c',
+ url='http://www.vi-hps.org/upload/packages/opari2/opari2-2.0.tar.gz')
version('1.1.4', '245d3d11147a06de77909b0805f530c0',
url='http://www.vi-hps.org/upload/packages/opari2/opari2-1.1.4.tar.gz')
version('1.1.2', '9a262c7ca05ff0ab5f7775ae96f3539e')
diff --git a/var/spack/repos/builtin/packages/openblas/openblas_icc.patch b/var/spack/repos/builtin/packages/openblas/openblas_icc.patch
new file mode 100644
index 0000000000..fb379aea41
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openblas/openblas_icc.patch
@@ -0,0 +1,253 @@
+diff --git a/kernel/x86_64/cscal_microk_bulldozer-2.c b/kernel/x86_64/cscal_microk_bulldozer-2.c
+index f470cf8..3abffc4 100644
+--- a/kernel/x86_64/cscal_microk_bulldozer-2.c
++++ b/kernel/x86_64/cscal_microk_bulldozer-2.c
+@@ -120,7 +120,7 @@ static void cscal_kernel_16( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -212,7 +212,7 @@ static void cscal_kernel_16_zero_r( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -289,7 +289,7 @@ static void cscal_kernel_16_zero_i( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -334,7 +334,7 @@ static void cscal_kernel_16_zero( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+diff --git a/kernel/x86_64/cscal_microk_haswell-2.c b/kernel/x86_64/cscal_microk_haswell-2.c
+index 0424de3..48e3395 100644
+--- a/kernel/x86_64/cscal_microk_haswell-2.c
++++ b/kernel/x86_64/cscal_microk_haswell-2.c
+@@ -120,7 +120,7 @@ static void cscal_kernel_16( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "0", "1",
++ : "cc", //"0", "1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -212,7 +212,7 @@ static void cscal_kernel_16_zero_r( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "0", "1",
++ : "cc", // "0", "1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -289,7 +289,7 @@ static void cscal_kernel_16_zero_i( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -334,7 +334,7 @@ static void cscal_kernel_16_zero( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "0", "1",
++ : "cc", //"0", "1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+diff --git a/kernel/x86_64/cscal_microk_steamroller-2.c b/kernel/x86_64/cscal_microk_steamroller-2.c
+index 763e7ad..8346e17 100644
+--- a/kernel/x86_64/cscal_microk_steamroller-2.c
++++ b/kernel/x86_64/cscal_microk_steamroller-2.c
+@@ -121,7 +121,7 @@ static void cscal_kernel_16( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "0", "1",
++ : "cc", //"0", "1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -213,7 +213,7 @@ static void cscal_kernel_16_zero_r( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "0", "1",
++ : "cc", //"0", "1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -290,7 +290,7 @@ static void cscal_kernel_16_zero_i( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -335,7 +335,7 @@ static void cscal_kernel_16_zero( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "0", "1",
++ : "cc", //"0", "1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+diff --git a/kernel/x86_64/dscal.c b/kernel/x86_64/dscal.c
+index bbc1c96..b6bde6c 100644
+--- a/kernel/x86_64/dscal.c
++++ b/kernel/x86_64/dscal.c
+@@ -141,7 +141,7 @@ static void dscal_kernel_inc_8(BLASLONG n, FLOAT *alpha, FLOAT *x, BLASLONG inc_
+ "r" (alpha), // 3
+ "r" (inc_x), // 4
+ "r" (inc_x3) // 5
+- : "cc", "%0", "%1", "%2",
++ : "cc", //"%0", "%1", "%2",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+diff --git a/kernel/x86_64/zscal_microk_bulldozer-2.c b/kernel/x86_64/zscal_microk_bulldozer-2.c
+index 28fe734..03882d6 100644
+--- a/kernel/x86_64/zscal_microk_bulldozer-2.c
++++ b/kernel/x86_64/zscal_microk_bulldozer-2.c
+@@ -120,7 +120,7 @@ static void zscal_kernel_8( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -212,7 +212,7 @@ static void zscal_kernel_8_zero_r( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -289,7 +289,7 @@ static void zscal_kernel_8_zero_i( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -334,7 +334,7 @@ static void zscal_kernel_8_zero( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+diff --git a/kernel/x86_64/zscal_microk_haswell-2.c b/kernel/x86_64/zscal_microk_haswell-2.c
+index a93308e..b1a34c1 100644
+--- a/kernel/x86_64/zscal_microk_haswell-2.c
++++ b/kernel/x86_64/zscal_microk_haswell-2.c
+@@ -120,7 +120,7 @@ static void zscal_kernel_8( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -212,7 +212,7 @@ static void zscal_kernel_8_zero_r( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -289,7 +289,7 @@ static void zscal_kernel_8_zero_i( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -334,7 +334,7 @@ static void zscal_kernel_8_zero( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+diff --git a/kernel/x86_64/zscal_microk_steamroller-2.c b/kernel/x86_64/zscal_microk_steamroller-2.c
+index d611bf5..97b07ad 100644
+--- a/kernel/x86_64/zscal_microk_steamroller-2.c
++++ b/kernel/x86_64/zscal_microk_steamroller-2.c
+@@ -121,7 +121,7 @@ static void zscal_kernel_8( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -213,7 +213,7 @@ static void zscal_kernel_8_zero_r( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -290,7 +290,7 @@ static void zscal_kernel_8_zero_i( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
+@@ -335,7 +335,7 @@ static void zscal_kernel_8_zero( BLASLONG n, FLOAT *alpha, FLOAT *x)
+ "r" (n), // 0
+ "r" (x), // 1
+ "r" (alpha) // 2
+- : "cc", "%0", "%1",
++ : "cc", //"%0", "%1",
+ "%xmm0", "%xmm1", "%xmm2", "%xmm3",
+ "%xmm4", "%xmm5", "%xmm6", "%xmm7",
+ "%xmm8", "%xmm9", "%xmm10", "%xmm11",
diff --git a/var/spack/repos/builtin/packages/openblas/package.py b/var/spack/repos/builtin/packages/openblas/package.py
index 22e49daaa7..d5a70f077b 100644
--- a/var/spack/repos/builtin/packages/openblas/package.py
+++ b/var/spack/repos/builtin/packages/openblas/package.py
@@ -27,114 +27,133 @@ from spack.package_test import *
import os
-class Openblas(Package):
+class Openblas(MakefilePackage):
"""OpenBLAS: An optimized BLAS library"""
- homepage = "http://www.openblas.net"
- url = "http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz"
+ homepage = 'http://www.openblas.net'
+ url = 'http://github.com/xianyi/OpenBLAS/archive/v0.2.15.tar.gz'
+ version('0.2.19', '28c998054fd377279741c6f0b9ea7941')
version('0.2.18', '805e7f660877d588ea7e3792cda2ee65')
version('0.2.17', '664a12807f2a2a7cda4781e3ab2ae0e1')
version('0.2.16', 'fef46ab92463bdbb1479dcec594ef6dc')
version('0.2.15', 'b1190f3d3471685f17cfd1ec1d252ac9')
- variant('shared', default=True, description="Build shared libraries as well as static libs.") # NOQA: ignore=E501
+ variant(
+ 'shared',
+ default=True,
+ description='Build shared libraries as well as static libs.'
+ )
variant('openmp', default=False, description="Enable OpenMP support.")
- variant('fpic', default=True, description="Build position independent code") # NOQA: ignore=E501
+ variant('pic', default=True, description='Build position independent code')
# virtual dependency
provides('blas')
provides('lapack')
patch('make.patch')
+ # This patch is in a pull request to OpenBLAS that has not been handled
+ # https://github.com/xianyi/OpenBLAS/pull/915
+ patch('openblas_icc.patch', when='%intel')
+
+ parallel = False
+
+ @property
+ def blas_libs(self):
+ shared = True if '+shared' in self.spec else False
+ return find_libraries(
+ ['libopenblas'], root=self.prefix, shared=shared, recurse=True
+ )
+
+ @property
+ def lapack_libs(self):
+ return self.blas_libs
+
+ @MakefilePackage.precondition('edit')
+ def check_compilers(self):
+        # As of 06/2016 there is no mechanism to specify that packages which
+        # depend on BLAS/LAPACK need C and/or Fortran symbols. For now,
+        # require both.
+ if self.compiler.f77 is None:
+ raise InstallError(
+ 'OpenBLAS requires both C and Fortran compilers!'
+ )
+        # Check that OpenMP is supported by the chosen compiler
+ if '+openmp' in self.spec and self.spec.satisfies('%clang'):
+ # Openblas (as of 0.2.18) hardcoded that OpenMP cannot
+ # be used with any (!) compiler named clang, bummer.
+ raise InstallError(
+ 'OpenBLAS does not support OpenMP with clang!'
+ )
- def install(self, spec, prefix):
+ @property
+ def make_defs(self):
# Configure fails to pick up fortran from FC=/abs/path/to/f77, but
# works fine with FC=/abs/path/to/gfortran.
# When mixing compilers make sure that
# $SPACK_ROOT/lib/spack/env/<compiler> have symlinks with reasonable
# names and hack them inside lib/spack/spack/compilers/<compiler>.py
- make_defs = ['CC=%s' % spack_cc,
- 'FC=%s' % spack_f77,
- 'MAKE_NO_J=1']
- make_targets = ['libs', 'netlib']
-
- # Build shared if variant is set.
- if '+shared' in spec:
- make_targets += ['shared']
- else:
- if '+fpic' in spec:
- make_defs.extend(['CFLAGS=-fPIC', 'FFLAGS=-fPIC'])
+ make_defs = [
+ 'CC={0}'.format(spack_cc),
+ 'FC={0}'.format(spack_f77),
+ 'MAKE_NO_J=1'
+ ]
+ if self.spec.satisfies('%gcc@:4.8.4'):
+ make_defs += ['NO_AVX2=1']
+ if '~shared' in self.spec:
+ if '+pic' in self.spec:
+ make_defs.extend([
+ 'CFLAGS={0}'.format(self.compiler.pic_flag),
+ 'FFLAGS={0}'.format(self.compiler.pic_flag)
+ ])
make_defs += ['NO_SHARED=1']
-
# fix missing _dggsvd_ and _sggsvd_
- if spec.satisfies('@0.2.16'):
+ if self.spec.satisfies('@0.2.16'):
make_defs += ['BUILD_LAPACK_DEPRECATED=1']
-
# Add support for OpenMP
- if '+openmp' in spec:
- # Openblas (as of 0.2.18) hardcoded that OpenMP cannot
- # be used with any (!) compiler named clang, bummer.
- if spec.satisfies('%clang'):
- raise InstallError('OpenBLAS does not support ',
- 'OpenMP with clang!')
-
+ if '+openmp' in self.spec:
make_defs += ['USE_OPENMP=1']
- make_args = make_defs + make_targets
- make(*make_args)
+ return make_defs
- make("tests", *make_defs)
+ @property
+ def build_targets(self):
+ targets = ['libs', 'netlib']
- # no quotes around prefix (spack doesn't use a shell)
- make('install', "PREFIX=%s" % prefix, *make_defs)
+ # Build shared if variant is set.
+ if '+shared' in self.spec:
+ targets += ['shared']
- # Blas virtual package should provide blas.a and libblas.a
- with working_dir(prefix.lib):
- symlink('libopenblas.a', 'blas.a')
- symlink('libopenblas.a', 'libblas.a')
- if '+shared' in spec:
- symlink('libopenblas.%s' % dso_suffix,
- 'libblas.%s' % dso_suffix)
+ return self.make_defs + targets
- # Lapack virtual package should provide liblapack.a
- with working_dir(prefix.lib):
- symlink('libopenblas.a', 'liblapack.a')
- if '+shared' in spec:
- symlink('libopenblas.%s' % dso_suffix,
- 'liblapack.%s' % dso_suffix)
+ @MakefilePackage.sanity_check('build')
+ def check_build(self):
+ make('tests', *self.make_defs)
+ @property
+ def install_targets(self):
+ make_args = [
+ 'install',
+ 'PREFIX={0}'.format(self.prefix),
+ ]
+ return make_args + self.make_defs
+
+ @MakefilePackage.sanity_check('install')
+ def check_install(self):
+ spec = self.spec
# Openblas may pass its own test but still fail to compile Lapack
# symbols. To make sure we get working Blas and Lapack, do a small
# test.
- self.check_install(spec)
-
- def setup_dependent_package(self, module, dspec):
- # This is WIP for a prototype interface for virtual packages.
- # We can update this as more builds start depending on BLAS/LAPACK.
- libdir = find_library_path('libopenblas.a',
- self.prefix.lib64,
- self.prefix.lib)
-
- self.spec.blas_static_lib = join_path(libdir, 'libopenblas.a')
- self.spec.lapack_static_lib = self.spec.blas_static_lib
-
- if '+shared' in self.spec:
- self.spec.blas_shared_lib = join_path(libdir, 'libopenblas.%s' %
- dso_suffix)
- self.spec.lapack_shared_lib = self.spec.blas_shared_lib
-
- def check_install(self, spec):
source_file = join_path(os.path.dirname(self.module.__file__),
'test_cblas_dgemm.c')
blessed_file = join_path(os.path.dirname(self.module.__file__),
'test_cblas_dgemm.output')
include_flags = ["-I%s" % join_path(spec.prefix, "include")]
- link_flags = ["-L%s" % join_path(spec.prefix, "lib"),
- "-llapack",
- "-lblas",
- "-lpthread"]
+ link_flags = self.lapack_libs.ld_flags.split()
+ if self.compiler.name == 'intel':
+ link_flags.extend(["-lifcore"])
+ link_flags.extend(["-lpthread"])
if '+openmp' in spec:
link_flags.extend([self.compiler.openmp_flag])
diff --git a/var/spack/repos/builtin/packages/opencoarrays/package.py b/var/spack/repos/builtin/packages/opencoarrays/package.py
new file mode 100644
index 0000000000..387bc45946
--- /dev/null
+++ b/var/spack/repos/builtin/packages/opencoarrays/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Opencoarrays(CMakePackage):
+ """OpenCoarrays is an open-source software project that produces an
+ application binary interface (ABI) supporting coarray Fortran (CAF)
+ compilers, an application programming interface (API) that supports users
+ of non-CAF compilers, and an associated compiler wrapper and program
+ launcher.
+ """
+
+ homepage = "http://www.opencoarrays.org/"
+ url = "https://github.com/sourceryinstitute/opencoarrays/releases/download/1.7.4/OpenCoarrays-1.7.4.tar.gz"
+
+ version('1.8.0', 'ca78d1507b2a118c75128c6c2e093e27')
+ version('1.7.4', '85ba87def461e3ff5a164de2e6482930')
+ version('1.6.2', '5a4da993794f3e04ea7855a6678981ba')
+
+ depends_on('mpi')
+
+ def cmake_args(self):
+ args = []
+ args.append("-DCMAKE_C_COMPILER=%s" % self.spec['mpi'].mpicc)
+ args.append("-DCMAKE_Fortran_COMPILER=%s" % self.spec['mpi'].mpifc)
+ return args
diff --git a/var/spack/repos/builtin/packages/opencv/package.py b/var/spack/repos/builtin/packages/opencv/package.py
index 989c66316c..8a721032a6 100644
--- a/var/spack/repos/builtin/packages/opencv/package.py
+++ b/var/spack/repos/builtin/packages/opencv/package.py
@@ -23,50 +23,186 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+from glob import glob
class Opencv(Package):
+ """OpenCV is released under a BSD license and hence it's free for both
+ academic and commercial use. It has C++, C, Python and Java interfaces and
+ supports Windows, Linux, Mac OS, iOS and Android. OpenCV was designed for
+ computational efficiency and with a strong focus on real-time applications.
+ Written in optimized C/C++, the library can take advantage of multi-core
+ processing. Enabled with OpenCL, it can take advantage of the hardware
+ acceleration of the underlying heterogeneous compute platform. Adopted all
+    around the world, OpenCV has a user community of more than 47 thousand
+    people and an estimated number of downloads exceeding 9 million. Usage
+    ranges from interactive art to mine inspection, stitching maps on the web,
+    and advanced robotics.
"""
- OpenCV is released under a BSD license and hence it's free for both academic and commercial use. It has C++, C,
- Python and Java interfaces and supports Windows, Linux, Mac OS, iOS and Android. OpenCV was designed for
- computational efficiency and with a strong focus on real-time applications. Written in optimized C/C++, the library
- can take advantage of multi-core processing. Enabled with OpenCL, it can take advantage of the hardware
- acceleration of the underlying heterogeneous compute platform. Adopted all around the world, OpenCV has more than
- 47 thousand people of user community and estimated number of downloads exceeding 9 million. Usage ranges from
- interactive art, to mines inspection, stitching maps on the web or through advanced robotics.
- """
+
homepage = 'http://opencv.org/'
url = 'https://github.com/Itseez/opencv/archive/3.1.0.tar.gz'
version('3.1.0', '70e1dd07f0aa06606f1bc0e3fa15abd3')
- variant('shared', default=True, description='Enables the build of shared libraries')
- variant('debug', default=False, description='Builds a debug version of the libraries')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
variant('eigen', default=True, description='Activates support for eigen')
variant('ipp', default=True, description='Activates support for IPP')
+ variant('jasper', default=True, description='Activates support for JasPer')
+ variant('cuda', default=False, description='Activates support for CUDA')
+ variant('gtk', default=False, description='Activates support for GTK')
+ variant('vtk', default=False, description='Activates support for VTK')
+ variant('qt', default=False, description='Activates support for QT')
+ variant('python', default=False,
+ description='Enables the build of Python extensions')
+ variant('java', default=False,
+ description='Activates support for Java')
+
+ depends_on('cmake', type='build')
+ depends_on('eigen', when='+eigen', type='build')
depends_on('zlib')
depends_on('libpng')
depends_on('libjpeg-turbo')
depends_on('libtiff')
- depends_on('python')
- depends_on('py-numpy')
-
- depends_on('eigen', when='+eigen')
+ depends_on('jasper', when='+jasper')
+ depends_on('cuda', when='+cuda')
+ depends_on('gtkplus', when='+gtk')
+ depends_on('vtk', when='+vtk')
+ depends_on('qt', when='+qt')
+ depends_on('jdk', when='+java')
+ depends_on('py-numpy', when='+python', type=('build', 'run'))
- # FIXME : GUI extensions missing
- # FIXME : CUDA extensions still missing
+ extends('python', when='+python')
def install(self, spec, prefix):
cmake_options = []
cmake_options.extend(std_cmake_args)
- cmake_options.extend(['-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'),
- '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF'),
- '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF',
- '-DWITH_IPP:BOOL=%s' % ('ON' if '+ipp' in spec else 'OFF')])
+ cmake_options.extend([
+ '-DCMAKE_BUILD_TYPE:STRING={0}'.format((
+ 'Debug' if '+debug' in spec else 'Release')),
+ '-DBUILD_SHARED_LIBS:BOOL={0}'.format((
+ 'ON' if '+shared' in spec else 'OFF')),
+ '-DENABLE_PRECOMPILED_HEADERS:BOOL=OFF',
+ '-DWITH_IPP:BOOL={0}'.format((
+ 'ON' if '+ipp' in spec else 'OFF')),
+ '-DWITH_CUDA:BOOL={0}'.format((
+ 'ON' if '+cuda' in spec else 'OFF')),
+ '-DWITH_QT:BOOL={0}'.format((
+ 'ON' if '+qt' in spec else 'OFF')),
+ '-DWITH_VTK:BOOL={0}'.format((
+ 'ON' if '+vtk' in spec else 'OFF')),
+ '-DBUILD_opencv_java:BOOL={0}'.format((
+ 'ON' if '+java' in spec else 'OFF')),
+ ])
+
+ # Media I/O
+ zlib = spec['zlib']
+ cmake_options.extend([
+ '-DZLIB_LIBRARY_{0}:FILEPATH={1}'.format((
+ 'DEBUG' if '+debug' in spec else 'RELEASE'),
+ join_path(zlib.prefix.lib,
+ 'libz.{0}'.format(dso_suffix))),
+ '-DZLIB_INCLUDE_DIR:PATH={0}'.format(zlib.prefix.include)
+ ])
+
+ libpng = spec['libpng']
+ cmake_options.extend([
+ '-DPNG_LIBRARY_{0}:FILEPATH={1}'.format((
+ 'DEBUG' if '+debug' in spec else 'RELEASE'),
+ join_path(libpng.prefix.lib,
+ 'libpng.{0}'.format(dso_suffix))),
+ '-DPNG_INCLUDE_DIR:PATH={0}'.format(libpng.prefix.include)
+ ])
+
+ libjpeg = spec['libjpeg-turbo']
+ cmake_options.extend([
+ '-DJPEG_LIBRARY:FILEPATH={0}'.format(
+ join_path(libjpeg.prefix.lib,
+ 'libjpeg.{0}'.format(dso_suffix))),
+ '-DJPEG_INCLUDE_DIR:PATH={0}'.format(libjpeg.prefix.include)
+ ])
+
+ libtiff = spec['libtiff']
+ cmake_options.extend([
+ '-DTIFF_LIBRARY_{0}:FILEPATH={1}'.format((
+ 'DEBUG' if '+debug' in spec else 'RELEASE'),
+ join_path(libtiff.prefix.lib,
+ 'libtiff.{0}'.format(dso_suffix))),
+ '-DTIFF_INCLUDE_DIR:PATH={0}'.format(libtiff.prefix.include)
+ ])
+
+ jasper = spec['jasper']
+ cmake_options.extend([
+ '-DJASPER_LIBRARY_{0}:FILEPATH={1}'.format((
+ 'DEBUG' if '+debug' in spec else 'RELEASE'),
+ join_path(jasper.prefix.lib,
+ 'libjasper.{0}'.format(dso_suffix))),
+ '-DJASPER_INCLUDE_DIR:PATH={0}'.format(jasper.prefix.include)
+ ])
+
+ # GUI
+ if '+gtk' not in spec:
+ cmake_options.extend([
+ '-DWITH_GTK:BOOL=OFF',
+ '-DWITH_GTK_2_X:BOOL=OFF'
+ ])
+ elif '^gtkplus@3:' in spec:
+ cmake_options.extend([
+ '-DWITH_GTK:BOOL=ON',
+ '-DWITH_GTK_2_X:BOOL=OFF'
+ ])
+ elif '^gtkplus@2:3' in spec:
+ cmake_options.extend([
+ '-DWITH_GTK:BOOL=OFF',
+ '-DWITH_GTK_2_X:BOOL=ON'
+ ])
+
+ # Python
+ if '+python' in spec:
+ python = spec['python']
+
+ try:
+ python_lib = glob(join_path(
+ python.prefix.lib, 'libpython*.{0}'.format(dso_suffix)))[0]
+        except IndexError:
+ raise InstallError('Cannot find libpython')
+
+ try:
+ python_include_dir = glob(join_path(python.prefix.include,
+ 'python*'))[0]
+        except IndexError:
+ raise InstallError('Cannot find python include directory')
+
+ if '^python@3:' in spec:
+ python_exe = join_path(python.prefix.bin, 'python3')
+ cmake_options.extend([
+ '-DBUILD_opencv_python3=ON',
+ '-DPYTHON3_EXECUTABLE={0}'.format(python_exe),
+ '-DPYTHON3_LIBRARY={0}'.format(python_lib),
+ '-DPYTHON3_INCLUDE_DIR={0}'.format(python_include_dir),
+ '-DBUILD_opencv_python2=OFF',
+ ])
+ elif '^python@2:3' in spec:
+ python_exe = join_path(python.prefix.bin, 'python2')
+ cmake_options.extend([
+ '-DBUILD_opencv_python2=ON',
+ '-DPYTHON2_EXECUTABLE={0}'.format(python_exe),
+ '-DPYTHON2_LIBRARY={0}'.format(python_lib),
+ '-DPYTHON2_INCLUDE_DIR={0}'.format(python_include_dir),
+ '-DBUILD_opencv_python3=OFF',
+ ])
+ else:
+ cmake_options.extend([
+ '-DBUILD_opencv_python2=OFF',
+ '-DBUILD_opencv_python3=OFF'
+ ])
with working_dir('spack_build', create=True):
cmake('..', *cmake_options)
diff --git a/var/spack/repos/builtin/packages/openexr/package.py b/var/spack/repos/builtin/packages/openexr/package.py
new file mode 100644
index 0000000000..3619bd063c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openexr/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Openexr(Package):
+ """OpenEXR Graphics Tools (high dynamic-range image file format)"""
+
+ homepage = "http://www.openexr.com/"
+ url = "https://savannah.nongnu.org/download/openexr/openexr-2.2.0.tar.gz"
+
+ version('2.2.0', 'b64e931c82aa3790329c21418373db4e')
+ version('2.1.0', '33735d37d2ee01c6d8fbd0df94fb8b43')
+ version('2.0.1', '4387e6050d2faa65dd5215618ff2ddce')
+ version('1.7.0', '27113284f7d26a58f853c346e0851d7a')
+ version('1.6.1', '11951f164f9c872b183df75e66de145a')
+ version('1.5.0', '55342d2256ab3ae99da16f16b2e12ce9')
+ version('1.4.0a', 'd0a4b9a930c766fa51561b05fb204afe')
+ version('1.3.2', '1522fe69135016c52eb88fc7d8514409')
+
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
+
+ depends_on('pkg-config', type='build')
+ depends_on('ilmbase')
+
+ def install(self, spec, prefix):
+ configure_options = ['--prefix={0}'.format(prefix)]
+ if '+debug' not in spec:
+ configure_options.append('--disable-debug')
+ configure(*configure_options)
+ make('install')
diff --git a/var/spack/repos/builtin/packages/openjpeg/package.py b/var/spack/repos/builtin/packages/openjpeg/package.py
index 9b2063593a..9790c52e7d 100644
--- a/var/spack/repos/builtin/packages/openjpeg/package.py
+++ b/var/spack/repos/builtin/packages/openjpeg/package.py
@@ -24,27 +24,26 @@
##############################################################################
from spack import *
-class Openjpeg(Package):
- """
- OpenJPEG is an open-source JPEG 2000 codec written in C language.
+
+class Openjpeg(CMakePackage):
+ """OpenJPEG is an open-source JPEG 2000 codec written in C language.
+
It has been developed in order to promote the use of JPEG 2000, a
still-image compression standard from the Joint Photographic
Experts Group (JPEG).
Since April 2015, it is officially recognized by ISO/IEC and
ITU-T as a JPEG 2000 Reference Software.
"""
+
homepage = "https://github.com/uclouvain/openjpeg"
- url = "https://github.com/uclouvain/openjpeg/archive/version.2.1.tar.gz"
+ url = "https://github.com/uclouvain/openjpeg/archive/version.2.1.tar.gz"
- version('2.1' , '3e1c451c087f8462955426da38aa3b3d')
+ version('2.1', '3e1c451c087f8462955426da38aa3b3d')
version('2.0.1', '105876ed43ff7dbb2f90b41b5a43cfa5')
- version('2.0' , 'cdf266530fee8af87454f15feb619609')
+ version('2.0', 'cdf266530fee8af87454f15feb619609')
version('1.5.2', '545f98923430369a6b046ef3632ef95c')
version('1.5.1', 'd774e4b5a0db5f0f171c4fc0aabfa14e')
-
- def install(self, spec, prefix):
- cmake('.', *std_cmake_args)
-
- make()
- make("install")
+ def url_for_version(self, version):
+ fmt = 'https://github.com/uclouvain/openjpeg/archive/version.{0}.tar.gz'
+ return fmt.format(version.dotted)
diff --git a/var/spack/repos/builtin/packages/openmpi/package.py b/var/spack/repos/builtin/packages/openmpi/package.py
index 163990bf15..754cc8d93e 100644
--- a/var/spack/repos/builtin/packages/openmpi/package.py
+++ b/var/spack/repos/builtin/packages/openmpi/package.py
@@ -22,18 +22,37 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
-
from spack import *
+import os
-class Openmpi(Package):
- """Open MPI is a project combining technologies and resources from
- several other projects (FT-MPI, LA-MPI, LAM/MPI, and PACX-MPI)
- in order to build the best MPI library available. A completely
- new MPI-2 compliant implementation, Open MPI offers advantages
- for system and software vendors, application developers and
- computer science researchers.
+def _verbs_dir():
+ """Try to find the directory where the OpenFabrics verbs package is
+ installed. Return None if not found."""
+ try:
+ # Try to locate Verbs by looking for a utility in the path
+ ibv_devices = which("ibv_devices")
+ # Run it (silently) to ensure it works
+ ibv_devices(output=str, error=str)
+ # Get path to executable
+ path = ibv_devices.exe[0]
+ # Remove executable name and "bin" directory
+ path = os.path.dirname(path)
+ path = os.path.dirname(path)
+ return path
+ except:
+ return None
+
+
+class Openmpi(AutotoolsPackage):
+ """The Open MPI Project is an open source Message Passing Interface
+ implementation that is developed and maintained by a consortium
+ of academic, research, and industry partners. Open MPI is
+ therefore able to combine the expertise, technologies, and
+ resources from all across the High Performance Computing
+ community in order to build the best MPI library available.
+ Open MPI offers advantages for system and software vendors,
+ application developers and computer science researchers.
"""
homepage = "http://www.open-mpi.org"
@@ -41,6 +60,9 @@ class Openmpi(Package):
list_url = "http://www.open-mpi.org/software/ompi/"
list_depth = 3
+ version('2.0.1', '6f78155bd7203039d2448390f3b51c96')
+ version('2.0.0', 'cdacc800cb4ce690c1f1273cb6366674')
+ version('1.10.3', 'e2fe4513200e2aaa1500b762342c674b')
version('1.10.2', 'b2f43d9635d2d52826e5ef9feb97fd4c')
version('1.10.1', 'f0fcd77ed345b7eafb431968124ba16e')
version('1.10.0', '280cf952de68369cebaca886c5ce0304')
@@ -51,89 +73,170 @@ class Openmpi(Package):
patch('llnl-platforms.patch', when="@1.6.5")
patch('configure.patch', when="@1.10.0:1.10.1")
- variant('psm', default=False, description='Build support for the PSM library.')
- variant('psm2', default=False, description='Build support for the Intel PSM2 library.')
- variant('pmi', default=False, description='Build support for PMI-based launchers')
- variant('verbs', default=False, description='Build support for OpenFabrics verbs.')
+ # Fabrics
+ variant('psm', default=False, description='Build support for the PSM library')
+ variant('psm2', default=False,
+ description='Build support for the Intel PSM2 library')
+ variant('pmi', default=False,
+ description='Build support for PMI-based launchers')
+ variant('verbs', default=_verbs_dir() is not None,
+ description='Build support for OpenFabrics verbs')
variant('mxm', default=False, description='Build Mellanox Messaging support')
- variant('thread_multiple', default=False, description='Enable MPI_THREAD_MULTIPLE support')
-
- # TODO : variant support for alps, loadleveler is missing
- variant('tm', default=False, description='Build TM (Torque, PBSPro, and compatible) support')
- variant('slurm', default=False, description='Build SLURM scheduler component')
+ # Schedulers
+ # TODO: support for alps and loadleveler is missing
+ variant('tm', default=False,
+ description='Build TM (Torque, PBSPro, and compatible) support')
+ variant('slurm', default=False,
+ description='Build SLURM scheduler component')
- variant('sqlite3', default=False, description='Build sqlite3 support')
+ # Additional support options
+ variant('java', default=False, description='Build Java support')
+ variant('sqlite3', default=False, description='Build SQLite3 support')
+ variant('vt', default=True, description='Build VampirTrace support')
+ variant('thread_multiple', default=False,
+ description='Enable MPI_THREAD_MULTIPLE support')
- # TODO : support for CUDA is missing
+ # TODO: support for CUDA is missing
provides('mpi@:2.2', when='@1.6.5')
provides('mpi@:3.0', when='@1.7.5:')
+ provides('mpi@:3.1', when='@2.0.0:')
depends_on('hwloc')
+ depends_on('jdk', when='+java')
depends_on('sqlite', when='+sqlite3')
def url_for_version(self, version):
- return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (version.up_to(2), version)
-
+ return "http://www.open-mpi.org/software/ompi/v%s/downloads/openmpi-%s.tar.bz2" % (
+ version.up_to(2), version)
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
+ spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
+ spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpic++'))
+ spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
+ spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
+
spack_env.set('OMPI_CC', spack_cc)
spack_env.set('OMPI_CXX', spack_cxx)
spack_env.set('OMPI_FC', spack_fc)
spack_env.set('OMPI_F77', spack_f77)
def setup_dependent_package(self, module, dep_spec):
- self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
+ self.spec.mpicc = join_path(self.prefix.bin, 'mpicc')
self.spec.mpicxx = join_path(self.prefix.bin, 'mpic++')
- self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
+ self.spec.mpifc = join_path(self.prefix.bin, 'mpif90')
self.spec.mpif77 = join_path(self.prefix.bin, 'mpif77')
+ self.spec.mpicxx_shared_libs = [
+ join_path(self.prefix.lib, 'libmpi_cxx.{0}'.format(dso_suffix)),
+ join_path(self.prefix.lib, 'libmpi.{0}'.format(dso_suffix))
+ ]
@property
def verbs(self):
- # Up through version 1.6, this option was previously named --with-openib
+ # Up through version 1.6, this option was previously named
+ # --with-openib
if self.spec.satisfies('@:1.6'):
return 'openib'
# In version 1.7, it was renamed to be --with-verbs
elif self.spec.satisfies('@1.7:'):
return 'verbs'
- def install(self, spec, prefix):
- config_args = ["--prefix=%s" % prefix,
- "--with-hwloc=%s" % spec['hwloc'].prefix,
- "--enable-shared",
- "--enable-static"]
- # Variant based arguments
- config_args.extend([
+ @AutotoolsPackage.precondition('autoreconf')
+ def die_without_fortran(self):
+ # Until we can pass variants such as +fortran through virtual
+ # dependencies depends_on('mpi'), require Fortran compiler to
+ # avoid delayed build errors in dependents.
+ if (self.compiler.f77 is None) or (self.compiler.fc is None):
+ raise InstallError(
+ 'OpenMPI requires both C and Fortran compilers!'
+ )
+
+ def configure_args(self):
+ spec = self.spec
+
+ config_args = [
+ '--enable-shared',
+ '--enable-static',
+ '--enable-mpi-cxx',
# Schedulers
'--with-tm' if '+tm' in spec else '--without-tm',
'--with-slurm' if '+slurm' in spec else '--without-slurm',
# Fabrics
'--with-psm' if '+psm' in spec else '--without-psm',
- '--with-psm2' if '+psm2' in spec else '--without-psm2',
- ('--with-%s' % self.verbs) if '+verbs' in spec else ('--without-%s' % self.verbs),
- '--with-mxm' if '+mxm' in spec else '--without-mxm',
- # Other options
- '--enable-mpi-thread-multiple' if '+thread_multiple' in spec else '--disable-mpi-thread-multiple',
- '--with-pmi' if '+pmi' in spec else '--without-pmi',
- '--with-sqlite3' if '+sqlite3' in spec else '--without-sqlite3'
- ])
-
- # TODO: use variants for this, e.g. +lanl, +llnl, etc.
- # use this for LANL builds, but for LLNL builds, we need:
- # "--with-platform=contrib/platform/llnl/optimized"
- if self.version == ver("1.6.5") and '+lanl' in spec:
- config_args.append("--with-platform=contrib/platform/lanl/tlcc2/optimized-nopanasas")
-
- if not self.compiler.f77 and not self.compiler.fc:
- config_args.append("--enable-mpi-fortran=no")
-
- configure(*config_args)
- make()
- make("install")
-
- self.filter_compilers()
-
+ ]
+
+ # Intel PSM2 support
+ if spec.satisfies('@1.10:'):
+ if '+psm2' in spec:
+ config_args.append('--with-psm2')
+ else:
+ config_args.append('--without-psm2')
+
+ # PMI support
+ if spec.satisfies('@1.5.5:'):
+ if '+pmi' in spec:
+ config_args.append('--with-pmi')
+ else:
+ config_args.append('--without-pmi')
+
+ # Mellanox Messaging support
+ if spec.satisfies('@1.5.4:'):
+ if '+mxm' in spec:
+ config_args.append('--with-mxm')
+ else:
+ config_args.append('--without-mxm')
+
+ # OpenFabrics verbs support
+ if '+verbs' in spec:
+ path = _verbs_dir()
+ if path is not None and path not in ('/usr', '/usr/local'):
+ config_args.append('--with-{0}={1}'.format(self.verbs, path))
+ else:
+ config_args.append('--with-{0}'.format(self.verbs))
+ else:
+ config_args.append('--without-{0}'.format(self.verbs))
+
+ # Hwloc support
+ if spec.satisfies('@1.5.2:'):
+ config_args.append('--with-hwloc={0}'.format(spec['hwloc'].prefix))
+
+ # Java support
+ if spec.satisfies('@1.7.4:'):
+ if '+java' in spec:
+ config_args.extend([
+ '--enable-java',
+ '--enable-mpi-java',
+ '--with-jdk-dir={0}'.format(spec['jdk'].prefix)
+ ])
+ else:
+ config_args.extend([
+ '--disable-java',
+ '--disable-mpi-java'
+ ])
+
+ # SQLite3 support
+ if spec.satisfies('@1.7.3:1.999'):
+ if '+sqlite3' in spec:
+ config_args.append('--with-sqlite3')
+ else:
+ config_args.append('--without-sqlite3')
+
+ # VampirTrace support
+ if spec.satisfies('@1.3:1.999'):
+ if '+vt' not in spec:
+ config_args.append('--enable-contrib-no-build=vt')
+
+ # Multithreading support
+ if spec.satisfies('@1.5.4:'):
+ if '+thread_multiple' in spec:
+ config_args.append('--enable-mpi-thread-multiple')
+ else:
+ config_args.append('--disable-mpi-thread-multiple')
+
+ return config_args
+
+ @AutotoolsPackage.sanity_check('install')
def filter_compilers(self):
"""Run after install to make the MPI compilers use the
compilers that Spack built the package with.
@@ -143,40 +246,33 @@ class Openmpi(Package):
be bound to whatever compiler they were built with.
"""
kwargs = {'ignore_absent': True, 'backup': False, 'string': False}
- dir = os.path.join(self.prefix, 'share/openmpi/')
-
- cc_wrappers = ['mpicc-vt-wrapper-data.txt', 'mpicc-wrapper-data.txt',
- 'ortecc-wrapper-data.txt', 'shmemcc-wrapper-data.txt']
-
- cxx_wrappers = ['mpic++-vt-wrapper-data.txt', 'mpic++-wrapper-data.txt',
- 'ortec++-wrapper-data.txt']
-
- fc_wrappers = ['mpifort-vt-wrapper-data.txt',
- 'mpifort-wrapper-data.txt', 'shmemfort-wrapper-data.txt']
-
- for wrapper in cc_wrappers:
- filter_file('compiler=.*', 'compiler=%s' % self.compiler.cc,
- os.path.join(dir, wrapper), **kwargs)
-
- for wrapper in cxx_wrappers:
- filter_file('compiler=.*', 'compiler=%s' % self.compiler.cxx,
- os.path.join(dir, wrapper), **kwargs)
-
- for wrapper in fc_wrappers:
- filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc,
- os.path.join(dir, wrapper), **kwargs)
-
- # These are symlinks in newer versions, so check that here
- f77_wrappers = ['mpif77-vt-wrapper-data.txt', 'mpif77-wrapper-data.txt']
- f90_wrappers = ['mpif90-vt-wrapper-data.txt', 'mpif90-wrapper-data.txt']
-
- for wrapper in f77_wrappers:
- path = os.path.join(dir, wrapper)
- if not os.path.islink(path):
- filter_file('compiler=.*', 'compiler=%s' % self.compiler.f77,
- path, **kwargs)
- for wrapper in f90_wrappers:
- path = os.path.join(dir, wrapper)
- if not os.path.islink(path):
- filter_file('compiler=.*', 'compiler=%s' % self.compiler.fc,
- path, **kwargs)
+ wrapper_basepath = join_path(self.prefix, 'share', 'openmpi')
+
+ wrappers = [
+ ('mpicc-vt-wrapper-data.txt', self.compiler.cc),
+ ('mpicc-wrapper-data.txt', self.compiler.cc),
+ ('ortecc-wrapper-data.txt', self.compiler.cc),
+ ('shmemcc-wrapper-data.txt', self.compiler.cc),
+ ('mpic++-vt-wrapper-data.txt', self.compiler.cxx),
+ ('mpic++-wrapper-data.txt', self.compiler.cxx),
+ ('ortec++-wrapper-data.txt', self.compiler.cxx),
+ ('mpifort-vt-wrapper-data.txt', self.compiler.fc),
+ ('mpifort-wrapper-data.txt', self.compiler.fc),
+ ('shmemfort-wrapper-data.txt', self.compiler.fc),
+ ('mpif90-vt-wrapper-data.txt', self.compiler.fc),
+ ('mpif90-wrapper-data.txt', self.compiler.fc),
+ ('mpif77-vt-wrapper-data.txt', self.compiler.f77),
+ ('mpif77-wrapper-data.txt', self.compiler.f77)
+ ]
+
+ for wrapper_name, compiler in wrappers:
+ wrapper = join_path(wrapper_basepath, wrapper_name)
+ if not os.path.islink(wrapper):
+                # Point the wrapper at the real underlying compiler
+                # instead of the Spack compiler wrapper
+ match = 'compiler=.*'
+ substitute = 'compiler={compiler}'.format(compiler=compiler)
+ filter_file(match, substitute, wrapper, **kwargs)
+ # Remove this linking flag if present
+ # (it turns RPATH into RUNPATH)
+ filter_file('-Wl,--enable-new-dtags', '', wrapper, **kwargs)
diff --git a/var/spack/repos/builtin/packages/openscenegraph/package.py b/var/spack/repos/builtin/packages/openscenegraph/package.py
new file mode 100644
index 0000000000..565941ff0e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/openscenegraph/package.py
@@ -0,0 +1,77 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Openscenegraph(Package):
+ """OpenSceneGraph is an open source, high performance 3D graphics toolkit
+ that's used in a variety of visual simulation applications."""
+
+ homepage = "http://www.openscenegraph.org"
+ url = "http://trac.openscenegraph.org/downloads/developer_releases/OpenSceneGraph-3.2.3.zip"
+
+ version('3.2.3', '02ffdad7744c747d8fad0d7babb58427')
+ version('3.1.5', '1c90b851b109849c985006486ef59822')
+
+ variant('debug', default=False, description='Builds a debug version of the library')
+ variant('shared', default=True, description='Builds a shared version of the library')
+
+ depends_on('cmake@2.8.7:', type='build')
+ depends_on('qt@4:')
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ build_type = 'Debug' if '+debug' in spec else 'Release'
+ shared_status = 'ON' if '+shared' in spec else 'OFF'
+
+ cmake_args = std_cmake_args[:]
+ cmake_args.extend([
+ '-DCMAKE_BUILD_TYPE={0}'.format(build_type),
+ '-DDYNAMIC_OPENSCENEGRAPH={0}'.format(shared_status),
+ '-DDYNAMIC_OPENTHREADS={0}'.format(shared_status),
+ ])
+
+ # NOTE: This is necessary in order to allow OpenSceneGraph to compile
+ # despite containing a number of implicit bool to int conversions.
+ if spec.satisfies('%gcc'):
+ cmake_args.extend([
+ '-DCMAKE_C_FLAGS=-fpermissive',
+ '-DCMAKE_CXX_FLAGS=-fpermissive',
+ ])
+
+ with working_dir('spack-build', create=True):
+ cmake(
+ '..',
+ '-DZLIB_INCLUDE_DIR={0}'.format(spec['zlib'].prefix.include),
+ '-DZLIB_LIBRARY={0}/libz.{1}'.format(spec['zlib'].prefix.lib,
+ dso_suffix),
+ '-DBUILD_OSG_APPLICATIONS=OFF',
+ '-DOSG_NOTIFY_DISABLED=ON',
+ '-DLIB_POSTFIX=',
+ *cmake_args
+ )
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/openspeedshop/package.py b/var/spack/repos/builtin/packages/openspeedshop/package.py
index c501ea063c..2e908b4099 100644
--- a/var/spack/repos/builtin/packages/openspeedshop/package.py
+++ b/var/spack/repos/builtin/packages/openspeedshop/package.py
@@ -22,75 +22,105 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-################################################################################
+##############################################################################
# Copyright (c) 2015-2016 Krell Institute. All Rights Reserved.
#
-# This program is free software; you can redistribute it and/or modify it under
-# the terms of the GNU General Public License as published by the Free Software
-# Foundation; either version 2 of the License, or (at your option) any later
-# version.
+# This program is free software; you can redistribute it and/or modify it
+# under the terms of the GNU General Public License as published by the Free
+# Software Foundation; either version 2 of the License, or (at your option)
+# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
-# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
-# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
-# details.
+# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
+# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
+# more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 59 Temple
# Place, Suite 330, Boston, MA 02111-1307 USA
-################################################################################
+##############################################################################
from spack import *
+import os
+import os.path
+
class Openspeedshop(Package):
- """OpenSpeedShop is a community effort by The Krell Institute with current direct funding from DOEs NNSA.
- It builds on top of a broad list of community infrastructures, most notably Dyninst and MRNet from UW,
- libmonitor from Rice, and PAPI from UTK. OpenSpeedShop is an open source multi platform Linux performance
- tool which is targeted to support performance analysis of applications running on both single node and
- large scale IA64, IA32, EM64T, AMD64, PPC, ARM, Blue Gene and Cray platforms. OpenSpeedShop development
- is hosted by the Krell Institute. The infrastructure and base components of OpenSpeedShop are released
- as open source code primarily under LGPL.
+ """OpenSpeedShop is a community effort by The Krell Institute with
+       current direct funding from DOE's NNSA. It builds on top of a
+ broad list of community infrastructures, most notably Dyninst
+ and MRNet from UW, libmonitor from Rice, and PAPI from UTK.
+       OpenSpeedShop is an open source, multi-platform Linux performance
+ tool which is targeted to support performance analysis of
+ applications running on both single node and large scale IA64,
+ IA32, EM64T, AMD64, PPC, ARM, Power8, Intel Phi, Blue Gene and
+ Cray platforms. OpenSpeedShop development is hosted by the Krell
+ Institute. The infrastructure and base components of OpenSpeedShop
+ are released as open source code primarily under LGPL.
"""
homepage = "http://www.openspeedshop.org"
- url = "https://github.com/OpenSpeedShop"
+ url = "https://github.com/OpenSpeedShop"
version('2.2', '16cb051179c2038de4e8a845edf1d573')
# Use when the git repository is available
- version('2.2', branch='master', git='https://github.com/OpenSpeedShop/openspeedshop.git')
+ version('2.3', branch='master',
+ git='https://github.com/OpenSpeedShop/openspeedshop.git')
# Optional mirror template
- #url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/openspeedshop-2.2.tar.gz"
- #version('2.2', '643337740dc6c2faca60f42d3620b0e1')
+ # url = "file:/home/jeg/OpenSpeedShop_ROOT/SOURCES/openspeedshop-2.3.tar.gz"
+ # version('2.3', '517a7798507241ad8abd8b0626a4d2cf')
parallel = False
- variant('offline', default=True, description="build with offline instrumentor enabled.")
- variant('cbtf', default=False, description="build with cbtf instrumentor enabled.")
- variant('runtime', default=False, description="build only the runtime libraries and collectors.")
- variant('frontend', default=False, description="build only the front-end tool using the runtime_dir to point to the target build.")
- variant('cuda', default=False, description="build with cuda packages included.")
- variant('ptgf', default=False, description="build with the PTGF based gui package enabled.")
- variant('rtfe', default=False, description="build for generic cluster platforms that have different processors on the fe and be nodes.")
+ variant('offline', default=False,
+ description="build with offline instrumentor enabled.")
+ variant('cbtf', default=True,
+ description="build with cbtf instrumentor enabled.")
+ variant('runtime', default=False,
+ description="build only the runtime libraries and collectors.")
+ variant('frontend', default=False,
+ description="build only the FE tool using the runtime_dir \
+ to point to target build.")
+ variant('cuda', default=False,
+ description="build with cuda packages included.")
+ variant('ptgf', default=False,
+ description="build with the PTGF based gui package enabled.")
+ variant('rtfe', default=False,
+ description="build for clusters heterogeneous processors \
+ on fe/be nodes.")
# MPI variants
- variant('openmpi', default=False, description="Build mpi experiment collector for openmpi MPI when this variant is enabled.")
- variant('mpt', default=False, description="Build mpi experiment collector for SGI MPT MPI when this variant is enabled.")
- variant('mvapich2', default=False, description="Build mpi experiment collector for mvapich2 MPI when this variant is enabled.")
- variant('mvapich', default=False, description="Build mpi experiment collector for mvapich MPI when this variant is enabled.")
- variant('mpich2', default=False, description="Build mpi experiment collector for mpich2 MPI when this variant is enabled.")
- variant('mpich', default=False, description="Build mpi experiment collector for mpich MPI when this variant is enabled.")
-
- depends_on("cmake@3.0.2")
- # Dependencies for openspeedshop that are common to all the variants of the OpenSpeedShop build
- depends_on("bison")
- depends_on("flex")
- depends_on("binutils@2.24+krellpatch")
+ variant('openmpi', default=False,
+ description="Build mpi collector for openmpi \
+ MPI when variant is enabled.")
+ variant('mpt', default=False,
+ description="Build mpi collector for SGI \
+ MPT MPI when variant is enabled.")
+ variant('mvapich2', default=False,
+ description="Build mpi collector for mvapich2\
+ MPI when variant is enabled.")
+ variant('mvapich', default=False,
+ description="Build mpi collector for mvapich\
+ MPI when variant is enabled.")
+ variant('mpich2', default=False,
+ description="Build mpi collector for mpich2\
+ MPI when variant is enabled.")
+ variant('mpich', default=False,
+ description="Build mpi collector for mpich\
+ MPI when variant is enabled.")
+
+ depends_on("cmake@3.0.2:", type='build')
+ # Dependencies for openspeedshop that are common to all
+ # the variants of the OpenSpeedShop build
+ depends_on("bison", type='build')
+ depends_on("flex", type='build')
+ depends_on("binutils@2.24+krellpatch", type='build')
depends_on("libelf")
depends_on("libdwarf")
depends_on("sqlite")
- depends_on("boost@1.50.0:")
- depends_on("dyninst@9.1.0")
- depends_on("python")
+ depends_on("boost@1.50.0:1.59.0")
+ depends_on("dyninst@9.2.0")
+ depends_on("libxml2+python")
depends_on("qt@3.3.8b+krellpatch")
# Dependencies only for the openspeedshop offline package.
@@ -108,14 +138,16 @@ class Openspeedshop(Package):
depends_on("cbtf", when='+cbtf')
depends_on("cbtf-krell", when='+cbtf')
depends_on("cbtf-argonavis", when='+cbtf+cuda')
- depends_on("mrnet@5.0.1:+lwthreads+krellpatch", when='+cbtf')
+ depends_on("mrnet@5.0.1:+lwthreads", when='+cbtf')
def adjustBuildTypeParams_cmakeOptions(self, spec, cmakeOptions):
- # Sets build type parameters into cmakeOptions the options that will enable the cbtf-krell built type settings
+        # Appends to cmakeOptions the build type parameters
+        # that enable the cbtf-krell build type settings
- compile_flags="-O2 -g"
+ compile_flags = "-O2 -g"
BuildTypeOptions = []
- # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it to be, not the stdcmakeargs
+ # Set CMAKE_BUILD_TYPE to what cbtf-krell wants it
+ # to be, not the stdcmakeargs
for word in cmakeOptions[:]:
if word.startswith('-DCMAKE_BUILD_TYPE'):
cmakeOptions.remove(word)
@@ -123,86 +155,150 @@ class Openspeedshop(Package):
cmakeOptions.remove(word)
if word.startswith('-DCMAKE_C_FLAGS'):
cmakeOptions.remove(word)
- BuildTypeOptions.extend([
- '-DCMAKE_BUILD_TYPE=None',
- '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
- '-DCMAKE_C_FLAGS=%s' % compile_flags
- ])
+ BuildTypeOptions.extend(['-DCMAKE_BUILD_TYPE=None',
+ '-DCMAKE_CXX_FLAGS=%s' % compile_flags,
+ '-DCMAKE_C_FLAGS=%s' % compile_flags])
cmakeOptions.extend(BuildTypeOptions)
+ def set_defaultbase_cmakeOptions(self, spec, cmakeOptions):
+        # Appends to cmakeOptions the base-level options
+        # for the openspeedshop cmake build.
+ python_vers = format(spec['python'].version.up_to(2))
+ python_pv = '/python' + python_vers
+ python_pvs = '/libpython' + python_vers + '.' + format(dso_suffix)
+
+ BaseOptions = []
+
+ BaseOptions.append('-DBINUTILS_DIR=%s' % spec['binutils'].prefix)
+ BaseOptions.append('-DLIBELF_DIR=%s' % spec['libelf'].prefix)
+ BaseOptions.append('-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix)
+ BaseOptions.append(
+ '-DPYTHON_EXECUTABLE=%s'
+ % join_path(spec['python'].prefix + '/bin/python'))
+ BaseOptions.append(
+ '-DPYTHON_INCLUDE_DIR=%s'
+ % join_path(spec['python'].prefix.include) + python_pv)
+ BaseOptions.append(
+ '-DPYTHON_LIBRARY=%s'
+ % join_path(spec['python'].prefix.lib) + python_pvs)
+ BaseOptions.append('-DBoost_NO_SYSTEM_PATHS=TRUE')
+ BaseOptions.append('-DBoost_NO_BOOST_CMAKE=TRUE')
+ BaseOptions.append('-DBOOST_ROOT=%s' % spec['boost'].prefix)
+ BaseOptions.append('-DBoost_DIR=%s' % spec['boost'].prefix)
+ BaseOptions.append('-DBOOST_LIBRARYDIR=%s' % spec['boost'].prefix.lib)
+ BaseOptions.append('-DDYNINST_DIR=%s' % spec['dyninst'].prefix)
+
+ cmakeOptions.extend(BaseOptions)
+
def set_mpi_cmakeOptions(self, spec, cmakeOptions):
- # Appends to cmakeOptions the options that will enable the appropriate MPI implementations
-
+ # Appends to cmakeOptions the options that will enable
+ # the appropriate MPI implementations
+
MPIOptions = []
# openmpi
if '+openmpi' in spec:
- MPIOptions.extend([
- '-DOPENMPI_DIR=%s' % spec['openmpi'].prefix
- ])
+ MPIOptions.append('-DOPENMPI_DIR=%s' % spec['openmpi'].prefix)
# mpich
if '+mpich' in spec:
- MPIOptions.extend([
- '-DMPICH_DIR=%s' % spec['mpich'].prefix
- ])
+ MPIOptions.append('-DMPICH_DIR=%s' % spec['mpich'].prefix)
# mpich2
if '+mpich2' in spec:
- MPIOptions.extend([
- '-DMPICH2_DIR=%s' % spec['mpich2'].prefix
- ])
+ MPIOptions.append('-DMPICH2_DIR=%s' % spec['mpich2'].prefix)
# mvapich
if '+mvapich' in spec:
- MPIOptions.extend([
- '-DMVAPICH_DIR=%s' % spec['mvapich'].prefix
- ])
+ MPIOptions.append('-DMVAPICH_DIR=%s' % spec['mvapich'].prefix)
# mvapich2
if '+mvapich2' in spec:
- MPIOptions.extend([
- '-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix
- ])
+ MPIOptions.append('-DMVAPICH2_DIR=%s' % spec['mvapich2'].prefix)
# mpt
if '+mpt' in spec:
- MPIOptions.extend([
- '-DMPT_DIR=%s' % spec['mpt'].prefix
- ])
+ MPIOptions.append('-DMPT_DIR=%s' % spec['mpt'].prefix)
cmakeOptions.extend(MPIOptions)
+ def setup_environment(self, spack_env, run_env):
+ """Set up the compile and runtime environments for a package."""
+
+ # Common settings to both offline and cbtf versions
+ # of OpenSpeedShop
+ run_env.prepend_path('PATH', self.prefix.bin)
+
+        # Find the Dyninst library path; this is needed to
+        # set the DYNINSTAPI_RT_LIB library, which is
+        # required for OpenSpeedShop to find loop-level
+        # performance information
+ dyninst_libdir = find_libraries(['libdyninstAPI_RT'],
+ root=self.spec['dyninst'].prefix,
+ shared=True, recurse=True)
+
+ # Set Dyninst RT library path to support OSS loop resolution code
+ run_env.set('DYNINSTAPI_RT_LIB', dyninst_libdir)
+
+ # Find openspeedshop library path
+ oss_libdir = find_libraries(['libopenss-framework'],
+ root=self.spec['openspeedshop'].prefix,
+ shared=True, recurse=True)
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ os.path.dirname(oss_libdir.joined()))
+
+ # Settings specific to the version, checking here
+ # for the cbtf instrumentor
+ if '+cbtf' in self.spec:
+ cbtf_mc = '/sbin/cbtf_mrnet_commnode'
+ cbtf_lmb = '/sbin/cbtf_libcbtf_mrnet_backend'
+ run_env.set('XPLAT_RSH', 'ssh')
+ run_env.set('MRNET_COMM_PATH',
+ join_path(self.spec['cbtf-krell'].prefix + cbtf_mc))
+
+ run_env.set('CBTF_MRNET_BACKEND_PATH',
+ join_path(self.spec['cbtf-krell'].prefix + cbtf_lmb))
+
+ run_env.prepend_path('PATH', self.spec['mrnet'].prefix.bin)
+ run_env.prepend_path('PATH', self.spec['cbtf-krell'].prefix.bin)
+ run_env.prepend_path('PATH', self.spec['cbtf-krell'].prefix.sbin)
+
+ elif '+offline' in self.spec:
+            # Had to use this form of syntax: self.prefix.lib and
+            # self.prefix.lib64 returned None all the time
+ run_env.set('OPENSS_RAWDATA_DIR', '.')
+ run_env.set('OPENSS_PLUGIN_PATH',
+ join_path(oss_libdir + '/openspeedshop'))
+ run_env.prepend_path('PATH', self.spec['papi'].prefix.bin)
+ run_env.prepend_path('PATH', self.spec['libdwarf'].prefix.bin)
+
+ if '+mpich' in self.spec:
+ run_env.set('OPENSS_MPI_IMPLEMENTATION', 'mpich')
+ if '+mpich2' in self.spec:
+ run_env.set('OPENSS_MPI_IMPLEMENTATION', 'mpich2')
+ if '+mvapich2' in self.spec:
+ run_env.set('OPENSS_MPI_IMPLEMENTATION', 'mvapich2')
+ if '+openmpi' in self.spec:
+ run_env.set('OPENSS_MPI_IMPLEMENTATION', 'openmpi')
def install(self, spec, prefix):
- #openmpi_prefix_path = "/opt/openmpi-1.8.2"
- #mvapich_prefix_path = "/usr/local/tools/mvapich-gnu"
- #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix,
- #'-DOPENMPI_DIR=%s' % openmpi_prefix_path,
- #'-DMVAPICH_DIR=%s' % mvapich_prefix_path,
-
- # FIXME: How do we make this dynamic in spack? That is, can we specify the paths to cuda dynamically?
- # WAITING for external package support.
- #if '+cuda' in spec:
- # cuda_prefix_path = "/usr/local/cuda-6.0"
- # cupti_prefix_path = "/usr/local/cuda-6.0/extras/CUPTI"
-
if '+offline' in spec:
instrumentor_setting = "offline"
if '+runtime' in spec:
with working_dir('build_runtime', create=True):
cmakeOptions = []
- cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
- '-DINSTRUMENTOR=%s' % instrumentor_setting,
- '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
- '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
- '-DPAPI_DIR=%s' % spec['papi'].prefix
- ])
-
+ cmakeOptions.extend([
+ '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '-DINSTRUMENTOR=%s' % instrumentor_setting,
+ '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
+ '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
+ '-DPAPI_DIR=%s' % spec['papi'].prefix])
+
# Add any MPI implementations coming from variant settings
self.set_mpi_cmakeOptions(spec, cmakeOptions)
cmakeOptions.extend(std_cmake_args)
- # Adjust the build options to the favored ones for this build
+ # Adjust the build options to the favored
+ # ones for this build
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
cmake('..', *cmakeOptions)
@@ -213,44 +309,35 @@ class Openspeedshop(Package):
else:
cmake_prefix_path = join_path(spec['dyninst'].prefix)
with working_dir('build', create=True):
-
- #python_vers=join_path(spec['python'].version[:2])
- #'-DOPENMPI_DIR=%s' % openmpi_prefix_path,
- #'-DMVAPICH_DIR=%s' % mvapich_prefix_path,
- #'-DMPICH_DIR=%s' % spec['mpich'].prefix,
- #'-DMPICH2_DIR=%s' % spec['mpich2'].prefix,
- #'-DBoost_NO_SYSTEM_PATHS=TRUE',
- #'-DBOOST_ROOT=%s' % spec['boost'].prefix,
- #'-DOPENMPI_DIR=%s' % spec['openmpi'].prefix,
-
- python_vers='%d.%d' % spec['python'].version[:2]
-
cmakeOptions = []
- cmakeOptions.extend(['-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
- '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
- '-DINSTRUMENTOR=%s' % instrumentor_setting,
- '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
- '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
- '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
- '-DLIBMONITOR_DIR=%s' % spec['libmonitor'].prefix,
- '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
- '-DPAPI_DIR=%s' % spec['papi'].prefix,
- '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix,
- '-DQTLIB_DIR=%s' % spec['qt'].prefix,
- '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
- '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
- '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
- '-DBoost_NO_SYSTEM_PATHS=TRUE',
- '-DBOOST_ROOT=%s' % spec['boost'].prefix,
- '-DDYNINST_DIR=%s' % spec['dyninst'].prefix
- ])
+
+ # Appends base options to cmakeOptions
+ self.set_defaultbase_cmakeOptions(spec, cmakeOptions)
+
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s'
+ % prefix,
+ '-DCMAKE_PREFIX_PATH=%s'
+ % cmake_prefix_path,
+ '-DINSTRUMENTOR=%s'
+ % instrumentor_setting,
+ '-DLIBMONITOR_DIR=%s'
+ % spec['libmonitor'].prefix,
+ '-DLIBUNWIND_DIR=%s'
+ % spec['libunwind'].prefix,
+ '-DPAPI_DIR=%s'
+ % spec['papi'].prefix,
+ '-DSQLITE3_DIR=%s'
+ % spec['sqlite'].prefix,
+ '-DQTLIB_DIR=%s'
+ % spec['qt'].prefix])
# Add any MPI implementations coming from variant settings
self.set_mpi_cmakeOptions(spec, cmakeOptions)
cmakeOptions.extend(std_cmake_args)
- # Adjust the build options to the favored ones for this build
+ # Adjust the build options to the favored
+ # ones for this build
self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
cmake('..', *cmakeOptions)
@@ -261,160 +348,73 @@ class Openspeedshop(Package):
elif '+cbtf' in spec:
instrumentor_setting = "cbtf"
- resolve_symbols = "symtabapi"
- cmake_prefix_path = join_path(spec['cbtf'].prefix) + ':' + join_path(spec['cbtf-krell'].prefix) + ':' + join_path(spec['dyninst'].prefix)
- #runtime_platform_cray = "cray"
- #if '+cray' in spec:
- # if '+runtime' in spec:
- # #-DCBTF_KRELL_CN_RUNTIME_DIR=${CBTF_KRELL_CN_INSTALL_DIR} \
- # with working_dir('build_cbtf_cray_runtime', create=True):
- # python_vers='%d.%d' % spec['python'].version[:2]
- # cmake('..',
- # '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- # '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
- # '-DRUNTIME_PLATFORM=%s' % runtime_platform_cray,
- # '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
- # '-DRESOLVE_SYMBOLS=%s' % resolve_symbols,
- # '-DINSTRUMENTOR=%s' % instrumentor_setting,
- # '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
- # '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
- # '-DCBTF_KRELL_CN_RUNTIME_DIR=%s' % spec['cbtf-krell'].prefix,
- # '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
- # '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
- # '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
- # '-DLIBUNWIND_DIR=%s' % spec['libunwind'].prefix,
- # '-DPAPI_DIR=%s' % spec['papi'].prefix,
- # '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
- # '-DXERCESC_DIR=%s' % spec['xerces-c'].prefix,
- # '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- # '-DBoost_NO_SYSTEM_PATHS=TRUE',
- # '-DBOOST_ROOT=%s' % spec['boost'].prefix,
- # *std_cmake_args)
-
- # make("clean")
- # make()
- # make("install")
-
-
- #elif '+mic' in spec:
- # comment out else and shift over the default case below until arch detection is in
- #else:
+ # resolve_symbols = "symtabapi"
+ cmake_prefix_path = join_path(spec['cbtf'].prefix) \
+ + ':' + join_path(spec['cbtf-krell'].prefix)\
+ + ':' + join_path(spec['dyninst'].prefix)
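+ # i.e. a colon-separated CMAKE_PREFIX_PATH of the (rough) form
+ #   <cbtf-prefix>:<cbtf-krell-prefix>:<dyninst-prefix>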
if '+runtime' in spec:
with working_dir('build_cbtf_runtime', create=True):
- python_vers='%d.%d' % spec['python'].version[:2]
- cmake('..',
- '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
- '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
- '-DINSTRUMENTOR=%s' % instrumentor_setting,
- '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
- '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
- '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
- '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
- '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
- '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
- '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
- '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
- '-DBoost_NO_SYSTEM_PATHS=TRUE',
- '-DBOOST_ROOT=%s' % spec['boost'].prefix,
- '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
- '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- *std_cmake_args)
+ cmakeOptions = []
+
+ # Appends base options to cmakeOptions
+ self.set_defaultbase_cmakeOptions(spec, cmakeOptions)
+
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s'
+ % prefix,
+ '-DCMAKE_PREFIX_PATH=%s'
+ % cmake_prefix_path,
+ '-DINSTRUMENTOR=%s'
+ % instrumentor_setting,
+ '-DCBTF_DIR=%s'
+ % spec['cbtf'].prefix,
+ '-DCBTF_KRELL_DIR=%s'
+ % spec['cbtf-krell'].prefix,
+ '-DMRNET_DIR=%s'
+ % spec['mrnet'].prefix])
+
+ # Adjust the build options to the
+ # favored ones for this build
+ self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+
+ cmake('..', *cmakeOptions)
+
make("clean")
make()
make("install")
else:
with working_dir('build_cbtf', create=True):
- python_vers='%d.%d' % spec['python'].version[:2]
- #python_vers=join_path(spec['python'].version[:2])
- cmake('..',
- '-DCMAKE_INSTALL_PREFIX=%s' % prefix,
- '-DCMAKE_LIBRARY_PATH=%s' % prefix.lib64,
- '-DCMAKE_PREFIX_PATH=%s' % cmake_prefix_path,
- '-DINSTRUMENTOR=%s' % instrumentor_setting,
- '-DBINUTILS_DIR=%s' % spec['binutils'].prefix,
- '-DLIBELF_DIR=%s' % spec['libelf'].prefix,
- '-DLIBDWARF_DIR=%s' % spec['libdwarf'].prefix,
- '-DSQLITE3_DIR=%s' % spec['sqlite'].prefix,
- '-DCBTF_DIR=%s' % spec['cbtf'].prefix,
- '-DCBTF_KRELL_DIR=%s' % spec['cbtf-krell'].prefix,
- '-DQTLIB_DIR=%s' % spec['qt'].prefix,
- '-DPYTHON_EXECUTABLE=%s' % join_path(spec['python'].prefix + '/bin/python'),
- '-DPYTHON_INCLUDE_DIR=%s' % join_path(spec['python'].prefix.include) + '/python' + python_vers,
- '-DPYTHON_LIBRARY=%s' % join_path(spec['python'].prefix.lib) + '/libpython' + python_vers + '.so',
- '-DBoost_NO_SYSTEM_PATHS=TRUE',
- '-DBOOST_ROOT=%s' % spec['boost'].prefix,
- '-DDYNINST_DIR=%s' % spec['dyninst'].prefix,
- '-DMRNET_DIR=%s' % spec['mrnet'].prefix,
- *std_cmake_args)
- make("clean")
- make()
- make("install")
-
- #if '+frontend' in spec:
- # with working_dir('build_frontend', create=True):
- # tbd
-
-
-
- #if '+cbtf' in spec:
- # if cray build type detected:
- # if '+runtime' in spec:
- # with working_dir('build_cray_cbtf_compute', create=True):
- # tbd
- # else:
- # with working_dir('build_cray_cbtf_frontend', create=True):
- # tbd
- # with working_dir('build_cray_osscbtf_frontend', create=True):
- # tbd
- # fi
- # elif '+intelmic' in spec:
- # if '+runtime' in spec:
- # with working_dir('build_intelmic_cbtf_compute', create=True):
- # tbd
- # else:
- # with working_dir('build_intelmic_cbtf_frontend', create=True):
- # tbd
- # with working_dir('build_intelmic_osscbtf_frontend', create=True):
- # fi
- # else
- # with working_dir('build_cluster_cbtf', create=True):
- # tbd
- # with working_dir('build_cluster osscbtf', create=True):
- # tbd
- # fi
- #elif '+offline' in spec:
- # if cray build type detected:
- # if '+runtime' in spec:
- # with working_dir('build_cray_ossoff_compute', create=True):
- # tbd
- # else:
- # with working_dir('build_cray_ossoff_frontend', create=True):
- # tbd
- # fi
- # elif '+intelmic' in spec:
- # if '+runtime' in spec:
- # with working_dir('build_intelmic_ossoff_compute', create=True):
- # tbd
- # else:
- # with working_dir('build_intelmic_ossoff_frontend', create=True):
- # tbd
- # fi
- # elif bgq build type detected:
- # if '+runtime' in spec:
- # with working_dir('build_bgq_ossoff_compute', create=True):
- # tbd
- # else:
- # with working_dir('build_bgq_ossoff_frontend', create=True):
- # tbd
- # fi
- # else
- # with working_dir('build_cluster ossoff', create=True):
- # tbd
- # fi
- #fi
+ cmakeOptions = []
+ # Appends base options to cmakeOptions
+ self.set_defaultbase_cmakeOptions(spec, cmakeOptions)
+
+ cmakeOptions.extend(
+ ['-DCMAKE_INSTALL_PREFIX=%s'
+ % prefix,
+ '-DCMAKE_PREFIX_PATH=%s'
+ % cmake_prefix_path,
+ '-DINSTRUMENTOR=%s'
+ % instrumentor_setting,
+ '-DSQLITE3_DIR=%s'
+ % spec['sqlite'].prefix,
+ '-DCBTF_DIR=%s'
+ % spec['cbtf'].prefix,
+ '-DCBTF_KRELL_DIR=%s'
+ % spec['cbtf-krell'].prefix,
+ '-DQTLIB_DIR=%s'
+ % spec['qt'].prefix,
+ '-DMRNET_DIR=%s'
+ % spec['mrnet'].prefix])
+
+ # Adjust the build options to the favored
+ # ones for this build
+ self.adjustBuildTypeParams_cmakeOptions(spec, cmakeOptions)
+ cmake('..', *cmakeOptions)
+ make("clean")
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/openssl/package.py b/var/spack/repos/builtin/packages/openssl/package.py
index 119cdd83c2..12b5ed9c52 100644
--- a/var/spack/repos/builtin/packages/openssl/package.py
+++ b/var/spack/repos/builtin/packages/openssl/package.py
@@ -22,7 +22,6 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import urllib
import llnl.util.tty as tty
from spack import *
@@ -35,85 +34,50 @@ class Openssl(Package):
Transport Layer Security (TLS v1) protocols as well as a
full-strength general purpose cryptography library."""
homepage = "http://www.openssl.org"
- url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
+ url = "ftp://openssl.org/source/openssl-1.0.1h.tar.gz"
- version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
- version('1.0.1r', '1abd905e079542ccae948af37e393d28')
- version('1.0.1t', '9837746fcf8a6727d46d22ca35953da1')
- version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a')
- version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5')
- version('1.0.2f', 'b3bf73f507172be9292ea2a8c28b659d')
- version('1.0.2g', 'f3c710c045cdee5fd114feb69feba7aa')
+ version('1.0.2j', '96322138f0b69e61b7212bc53d5e912b')
+ version('1.0.2i', '678374e63f8df456a697d3e5e5a931fb')
version('1.0.2h', '9392e65072ce4b614c1392eefc1f23d0')
+ version('1.0.2g', 'f3c710c045cdee5fd114feb69feba7aa')
+ version('1.0.2f', 'b3bf73f507172be9292ea2a8c28b659d')
+ version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5')
+ version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a')
+ version('1.0.1u', '130bb19745db2a5a09f22ccbbf7e69d0')
+ version('1.0.1t', '9837746fcf8a6727d46d22ca35953da1')
+ version('1.0.1r', '1abd905e079542ccae948af37e393d28')
+ version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
depends_on("zlib")
parallel = False
- def url_for_version(self, version):
- # This URL is computed pinging the place where the latest version is stored. To avoid slowdown
- # due to repeated pinging, we store the URL in a private class attribute to do the job only once per version
- openssl_urls = getattr(Openssl, '_openssl_url', {})
- openssl_url = openssl_urls.get(version, None)
- # Same idea, but just to avoid issuing the same message multiple times
- warnings_given_to_user = getattr(Openssl, '_warnings_given', {})
- if openssl_url is None:
- if self.spec.satisfies('@external'):
- # The version @external is reserved to system openssl. In that case return a fake url and exit
- openssl_url = '@external (reserved version for system openssl)'
- if not warnings_given_to_user.get(version, False):
- tty.msg('Using openssl@external : the version @external is reserved for system openssl')
- warnings_given_to_user[version] = True
- else:
- openssl_url = self.check_for_outdated_release(version, warnings_given_to_user) # Store the computed URL
- openssl_urls[version] = openssl_url
- # Store the updated dictionary of URLS
- Openssl._openssl_url = openssl_urls
- # Store the updated dictionary of warnings
- Openssl._warnings_given = warnings_given_to_user
-
- return openssl_url
-
- def check_for_outdated_release(self, version, warnings_given_to_user):
- latest = 'ftp://ftp.openssl.org/source/openssl-{version}.tar.gz'
- older = 'http://www.openssl.org/source/old/{version_number}/openssl-{version_full}.tar.gz'
- # Try to use the url where the latest tarballs are stored. If the url does not exist (404), then
- # return the url for older format
- version_number = '.'.join([str(x) for x in version[:-1]])
- try:
- openssl_url = latest.format(version=version)
- urllib.urlopen(openssl_url)
- except IOError:
- openssl_url = older.format(version_number=version_number, version_full=version)
- # Checks if we already warned the user for this particular version of OpenSSL.
- # If not we display a warning message and mark this version
- if not warnings_given_to_user.get(version, False):
- tty.warn(
- 'This installation depends on an old version of OpenSSL, which may have known security issues. ')
- tty.warn('Consider updating to the latest version of this package.')
- tty.warn('More details at {homepage}'.format(homepage=Openssl.homepage))
- warnings_given_to_user[version] = True
-
- return openssl_url
+ def handle_fetch_error(self, error):
+ tty.warn("Fetching OpenSSL failed. This may indicate that OpenSSL has "
+ "been updated, and the version in your instance of Spack is "
+ "insecure. Consider updating to the latest OpenSSL version.")
def install(self, spec, prefix):
# OpenSSL uses a variable APPS in its Makefile. If it happens to be set
# in the environment, then this will override what is set in the
# Makefile, leading to build errors.
env.pop('APPS', None)
- if spec.satisfies("=darwin-x86_64") or spec.satisfies("=ppc64"):
+
+ if spec.satisfies('target=x86_64') or spec.satisfies('target=ppc64'):
# This needs to be done for all 64-bit architectures (except Linux,
# where it happens automatically?)
env['KERNEL_BITS'] = '64'
- config = Executable("./config")
- config("--prefix=%s" % prefix,
- "--openssldir=%s" % join_path(prefix, 'etc', 'openssl'),
- "zlib",
- "no-krb5",
- "shared")
+
+ options = ['zlib', 'no-krb5', 'shared']
+
+ config = Executable('./config')
+ config('--prefix=%s' % prefix,
+ '--openssldir=%s' % join_path(prefix, 'etc', 'openssl'),
+ *options)
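+ # Roughly equivalent to running:
+ #   ./config --prefix=<prefix> --openssldir=<prefix>/etc/openssl \
+ #       zlib no-krb5 shared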
+
# Remove non-standard compiler options if present. These options are
# present e.g. on Darwin. They are non-standard, i.e. most compilers
# (e.g. gcc) will not accept them.
filter_file(r'-arch x86_64', '', 'Makefile')
make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/opium/package.py b/var/spack/repos/builtin/packages/opium/package.py
new file mode 100644
index 0000000000..521f917230
--- /dev/null
+++ b/var/spack/repos/builtin/packages/opium/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Opium(Package):
+ """DFT pseudopotential generation project"""
+
+ homepage = "https://opium.sourceforge.net/index.html"
+ url = "https://downloads.sourceforge.net/project/opium/opium/opium-v3.8/opium-v3.8-src.tgz"
+
+ version('3.8', 'f710c0f869e70352b4a510c31e13bf9f')
+
+ depends_on('blas')
+ depends_on('lapack')
+
+ def install(self, spec, prefix):
+ libs = spec['lapack'].lapack_libs + spec['blas'].blas_libs
+ options = ['LDFLAGS=%s' % libs.ld_flags]
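+ # Here libs.ld_flags expands to linker flags of roughly the form
+ #   -L<lapack-prefix>/lib -llapack -L<blas-prefix>/lib -lblas
+ # with the exact libraries depending on the chosen blas/lapack providers.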
+
+ configure(*options)
+ with working_dir("src", create=False):
+ make("all-subdirs")
+ make("opium")
+
+ # opium does not provide a "make install" target, so install by hand

+ mkdirp(self.prefix.bin)
+ install(join_path(self.stage.source_path, 'opium'),
+ self.prefix.bin)
diff --git a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
index 2104bf842b..161ba6254a 100644
--- a/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
+++ b/var/spack/repos/builtin/packages/osu-micro-benchmarks/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class OsuMicroBenchmarks(Package):
"""The Ohio MicroBenchmark suite is a collection of independent MPI
message passing performance microbenchmarks developed and written at
@@ -41,7 +42,6 @@ class OsuMicroBenchmarks(Package):
depends_on('mpi')
depends_on('cuda', when='+cuda')
-
def install(self, spec, prefix):
config_args = [
'CC=%s' % spec['mpi'].prefix.bin + '/mpicc',
diff --git a/var/spack/repos/builtin/packages/otf/package.py b/var/spack/repos/builtin/packages/otf/package.py
index 4a7a00b212..39eb5a85aa 100644
--- a/var/spack/repos/builtin/packages/otf/package.py
+++ b/var/spack/repos/builtin/packages/otf/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Otf(Package):
"""To improve scalability for very large and massively parallel
traces the Open Trace Format (OTF) is developed at ZIH as a
diff --git a/var/spack/repos/builtin/packages/otf2/package.py b/var/spack/repos/builtin/packages/otf2/package.py
index 131836f8ac..ee39f448eb 100644
--- a/var/spack/repos/builtin/packages/otf2/package.py
+++ b/var/spack/repos/builtin/packages/otf2/package.py
@@ -27,8 +27,8 @@ from spack import *
class Otf2(Package):
- """
- The Open Trace Format 2 is a highly scalable, memory efficient event trace data format plus support library.
+ """The Open Trace Format 2 is a highly scalable, memory efficient event
+ trace data format plus support library.
"""
homepage = "http://www.vi-hps.org/score-p"
@@ -46,10 +46,10 @@ class Otf2(Package):
url="http://www.vi-hps.org/upload/packages/otf2/otf2-1.2.1.tar.gz")
def install(self, spec, prefix):
- configure_args=["--prefix=%s" % prefix,
- "--enable-shared",
- "CFLAGS=-fPIC",
- "CXXFLAGS=-fPIC"]
+ configure_args = ["--prefix=%s" % prefix,
+ "--enable-shared",
+ "CFLAGS=-fPIC",
+ "CXXFLAGS=-fPIC"]
configure(*configure_args)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/p4est/package.py b/var/spack/repos/builtin/packages/p4est/package.py
index d0db4f7f20..da58f5a7e7 100644
--- a/var/spack/repos/builtin/packages/p4est/package.py
+++ b/var/spack/repos/builtin/packages/p4est/package.py
@@ -24,46 +24,41 @@
##############################################################################
from spack import *
+
class P4est(Package):
- """Dynamic management of a collection (a forest) of adaptive octrees in parallel"""
+ """Dynamic management of a collection (a forest) of adaptive octrees in
+ parallel"""
homepage = "http://www.p4est.org"
url = "http://p4est.github.io/release/p4est-1.1.tar.gz"
version('1.1', '37ba7f4410958cfb38a2140339dbf64f')
- variant('tests', default=True, description='Run small tests')
-
# build dependencies
- depends_on('automake')
- depends_on('autoconf')
- depends_on('libtool@2.4.2:')
+ depends_on('automake', type='build')
+ depends_on('autoconf', type='build')
+ depends_on('libtool@2.4.2:', type='build')
# other dependencies
- depends_on('lua') # Needed for the submodule sc
depends_on('mpi')
depends_on('zlib')
def install(self, spec, prefix):
- options = ['--enable-mpi',
- '--enable-shared',
- '--disable-vtk-binary',
- '--without-blas',
- 'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL',
- 'CFLAGS=-O2',
- 'CC=%s' % self.spec['mpi'].mpicc,
- 'CXX=%s' % self.spec['mpi'].mpicxx,
- 'FC=%s' % self.spec['mpi'].mpifc,
- 'F77=%s' % self.spec['mpi'].mpif77
- ]
+ options = [
+ '--enable-mpi',
+ '--enable-shared',
+ '--disable-vtk-binary',
+ '--without-blas',
+ 'CPPFLAGS=-DSC_LOG_PRIORITY=SC_LP_ESSENTIAL',
+ 'CFLAGS=-O2',
+ 'CC=%s' % self.spec['mpi'].mpicc,
+ 'CXX=%s' % self.spec['mpi'].mpicxx,
+ 'FC=%s' % self.spec['mpi'].mpifc,
+ 'F77=%s' % self.spec['mpi'].mpif77
+ ]
configure('--prefix=%s' % prefix, *options)
make()
- # Make tests optional as sometimes mpiexec can't be run with an error:
- # mpiexec has detected an attempt to run as root.
- # Running at root is *strongly* discouraged as any mistake (e.g., in
- # defining TMPDIR) or bug can result in catastrophic damage to the OS
- # file system, leaving your system in an unusable state.
- if '+tests' in self.spec:
- make("check")
+ if self.run_tests:
+ make("check")
make("install")
diff --git a/var/spack/repos/builtin/packages/panda/package.py b/var/spack/repos/builtin/packages/panda/package.py
new file mode 100644
index 0000000000..e30c2c869d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/panda/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Panda(Package):
+ """PANDA: Parallel AdjaceNcy Decomposition Algorithm"""
+ homepage = "http://comopt.ifi.uni-heidelberg.de/software/PANDA/index.html"
+ url = "http://comopt.ifi.uni-heidelberg.de/software/PANDA/downloads/current_panda.tar"
+
+ version('current', 'b06dc312ee56e13eefea9c915b70fcef')
+
+ # Note: Panda can also be built without MPI support
+
+ depends_on("cmake", type="build")
+ depends_on("mpi")
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake("..", *std_cmake_args)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/pango/package.py b/var/spack/repos/builtin/packages/pango/package.py
index a04f6d64e0..c45054be58 100644
--- a/var/spack/repos/builtin/packages/pango/package.py
+++ b/var/spack/repos/builtin/packages/pango/package.py
@@ -24,18 +24,29 @@
##############################################################################
from spack import *
+
class Pango(Package):
"""Pango is a library for laying out and rendering of text, with
an emphasis on internationalization. It can be used anywhere
that text layout is needed, though most of the work on Pango so
far has been done in the context of the GTK+ widget toolkit."""
homepage = "http://www.pango.org"
- url = "http://ftp.gnome.org/pub/gnome/sources/pango/1.36/pango-1.36.8.tar.xz"
+ url = "http://ftp.gnome.org/pub/GNOME/sources/pango/1.40/pango-1.40.3.tar.xz"
+ list_url = "http://ftp.gnome.org/pub/gnome/sources/pango/"
+ list_depth = 2
+
+ version('1.40.3', 'abba8b5ce728520c3a0f1535eab19eac3c14aeef7faa5aded90017ceac2711d3')
+ version('1.40.1', 'e27af54172c72b3ac6be53c9a4c67053e16c905e02addcf3a603ceb2005c1a40')
+ version('1.36.8', '18dbb51b8ae12bae0ab7a958e7cf3317c9acfc8a1e1103ec2f147164a0fc2d07')
- version('1.36.8', '217a9a753006275215fa9fa127760ece')
+ variant('X', default=False, description="Enable an X toolkit")
+ depends_on("pkg-config", type="build")
depends_on("harfbuzz")
depends_on("cairo")
+ depends_on("cairo~X", when='~X')
+ depends_on("cairo+X", when='+X')
+ depends_on("glib")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/papi/package.py b/var/spack/repos/builtin/packages/papi/package.py
index ecd958407f..90a7490e75 100644
--- a/var/spack/repos/builtin/packages/papi/package.py
+++ b/var/spack/repos/builtin/packages/papi/package.py
@@ -28,6 +28,7 @@ import os
import sys
from llnl.util.filesystem import fix_darwin_install_name
+
class Papi(Package):
"""PAPI provides the tool designer and application engineer with a
consistent interface and methodology for use of the performance
@@ -40,6 +41,7 @@ class Papi(Package):
homepage = "http://icl.cs.utk.edu/papi/index.html"
url = "http://icl.cs.utk.edu/projects/papi/downloads/papi-5.4.1.tar.gz"
+ version('5.5.0', '5e1244a04ca031d4cc29b46ce3dd05b5')
version('5.4.3', '3211b5a5bb389fe692370f5cf4cc2412')
version('5.4.1', '9134a99219c79767a11463a76b0b01a2')
version('5.3.0', '367961dd0ab426e5ae367c2713924ffb')
@@ -47,7 +49,7 @@ class Papi(Package):
def install(self, spec, prefix):
with working_dir("src"):
- configure_args=["--prefix=%s" % prefix]
+ configure_args = ["--prefix=%s" % prefix]
# PAPI uses MPI if MPI is present; since we don't require
# an MPI package, we ensure that all attempts to use MPI
diff --git a/var/spack/repos/builtin/packages/paradiseo/package.py b/var/spack/repos/builtin/packages/paradiseo/package.py
index d6324b63e6..c91b01c964 100644
--- a/var/spack/repos/builtin/packages/paradiseo/package.py
+++ b/var/spack/repos/builtin/packages/paradiseo/package.py
@@ -23,40 +23,50 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import sys
+
class Paradiseo(Package):
- """A C++ white-box object-oriented framework dedicated to the reusable design of metaheuristics."""
+ """A C++ white-box object-oriented framework dedicated to the reusable
+ design of metaheuristics."""
homepage = "http://paradiseo.gforge.inria.fr/"
- # Installing from the development version is a better option at this
+ # Installing from the development version is a better option at this
# point than using the very old supplied packages
version('head', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git')
- # This is a version that the package formula author has tested successfully.
- # However, the clone is very large (~1Gb git history). The history in the
- # head version has been trimmed significantly.
- version('dev-safe', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git',
- commit='dbb8fbe9a786efd4d1c26408ac1883442e7643a6')
- variant('mpi', default=True, description='Compile with parallel and distributed metaheuristics module')
- variant('smp', default=True, description='Compile with symmetric multi-processing module ')
- variant('edo', default=True, description='Compile with (Experimental) EDO module')
- #variant('tests', default=False, description='Compile with build tests')
- #variant('doc', default=False, description='Compile with documentation')
- variant('debug', default=False, description='Builds a debug version of the libraries')
+ # This is a version that the package formula author has tested
+ # successfully. However, the clone is very large (~1Gb git
+ # history). The history in the head version has been trimmed
+ # significantly.
+ version(
+ 'dev-safe', git='https://gforge.inria.fr/git/paradiseo/paradiseo.git',
+ commit='dbb8fbe9a786efd4d1c26408ac1883442e7643a6')
+
+ variant('mpi', default=True,
+ description='Compile with parallel and distributed '
+ 'metaheuristics module')
+ variant('smp', default=True,
+ description='Compile with symmetric multi-processing module ')
+ variant('edo', default=True,
+ description='Compile with (Experimental) EDO module')
+
+ # variant('tests', default=False, description='Compile with build tests')
+ # variant('doc', default=False, description='Compile with documentation')
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
variant('openmp', default=False, description='Enable OpenMP support')
variant('gnuplot', default=False, description='Enable GnuPlot support')
-
+
# Required dependencies
- depends_on ("cmake")
+ depends_on("cmake", type='build')
# Optional dependencies
- depends_on ("mpi", when="+mpi")
- depends_on ("doxygen", when='+doc')
- depends_on ("gnuplot", when='+gnuplot')
- depends_on ("eigen", when='+edo')
- depends_on ("boost~mpi", when='+edo~mpi')
- depends_on ("boost+mpi", when='+edo+mpi')
+ depends_on("mpi", when="+mpi")
+ depends_on("doxygen", when='+doc', type='build')
+ depends_on("gnuplot", when='+gnuplot')
+ depends_on("eigen", when='+edo', type='build')
+ depends_on("boost~mpi", when='+edo~mpi')
+ depends_on("boost+mpi", when='+edo+mpi')
# Patches
patch('enable_eoserial.patch')
@@ -69,16 +79,21 @@ class Paradiseo(Package):
options.extend(std_cmake_args)
options.extend([
- '-DCMAKE_BUILD_TYPE:STRING=%s' % ('Debug' if '+debug' in spec else 'Release'),
+ '-DCMAKE_BUILD_TYPE:STRING=%s' % (
+ 'Debug' if '+debug' in spec else 'Release'),
'-DINSTALL_TYPE:STRING=MIN',
'-DMPI:BOOL=%s' % ('TRUE' if '+mpi' in spec else 'FALSE'),
- '-DSMP:BOOL=%s' % ('TRUE' if '+smp' in spec else 'FALSE'), # Note: This requires a C++11 compatible compiler
+ # Note: This requires a C++11 compatible compiler
+ '-DSMP:BOOL=%s' % ('TRUE' if '+smp' in spec else 'FALSE'),
'-DEDO:BOOL=%s' % ('TRUE' if '+edo' in spec else 'FALSE'),
- '-DENABLE_CMAKE_TESTING:BOOL=%s' % ('TRUE' if '+tests' in spec else 'FALSE'),
- '-DENABLE_OPENMP:BOOL=%s' % ('TRUE' if '+openmp' in spec else 'FALSE'),
- '-DENABLE_GNUPLOT:BOOL=%s' % ('TRUE' if '+gnuplot' in spec else 'FALSE')
+ '-DENABLE_CMAKE_TESTING:BOOL=%s' % (
+ 'TRUE' if '+tests' in spec else 'FALSE'),
+ '-DENABLE_OPENMP:BOOL=%s' % (
+ 'TRUE' if '+openmp' in spec else 'FALSE'),
+ '-DENABLE_GNUPLOT:BOOL=%s' % (
+ 'TRUE' if '+gnuplot' in spec else 'FALSE')
])
-
+
with working_dir('spack-build', create=True):
# Configure
cmake('..', *options)
diff --git a/var/spack/repos/builtin/packages/parallel-netcdf/package.py b/var/spack/repos/builtin/packages/parallel-netcdf/package.py
index 59c44c8a4a..65512017ef 100644
--- a/var/spack/repos/builtin/packages/parallel-netcdf/package.py
+++ b/var/spack/repos/builtin/packages/parallel-netcdf/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class ParallelNetcdf(Package):
+
+class ParallelNetcdf(AutotoolsPackage):
"""Parallel netCDF (PnetCDF) is a library providing high-performance
parallel I/O while still maintaining file-format compatibility with
Unidata's NetCDF."""
@@ -37,14 +38,20 @@ class ParallelNetcdf(Package):
variant('cxx', default=True, description='Build the C++ Interface')
variant('fortran', default=True, description='Build the Fortran Interface')
- variant('fpic', default=True, description='Produce position-independent code (for use with shared libraries)')
+ variant('fpic', default=True,
+ description='Produce position-independent code (for shared libs)')
+
+ depends_on('mpi')
+
+ depends_on('m4', type='build')
+
+ # See:
+ # https://trac.mcs.anl.gov/projects/parallel-netcdf/browser/trunk/INSTALL
+ def configure_args(self):
+ spec = self.spec
- depends_on("m4")
- depends_on("mpi")
+ args = ['--with-mpi={0}'.format(spec['mpi'].prefix)]
- # See: https://trac.mcs.anl.gov/projects/parallel-netcdf/browser/trunk/INSTALL
- def install(self, spec, prefix):
- args = list()
if '+fpic' in spec:
args.extend(['CFLAGS=-fPIC', 'CXXFLAGS=-fPIC', 'FFLAGS=-fPIC'])
if '~cxx' in spec:
@@ -52,8 +59,4 @@ class ParallelNetcdf(Package):
if '~fortran' in spec:
args.append('--disable-fortran')
- args.extend(["--prefix=%s" % prefix,
- "--with-mpi=%s" % spec['mpi'].prefix])
- configure(*args)
- make()
- make("install")
+ return args
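+ # Sketch of the resulting build, assuming the default variants
+ # (+cxx +fortran +fpic): AutotoolsPackage should end up invoking roughly
+ #   ./configure --prefix=<prefix> --with-mpi=<mpi-prefix> \
+ #       CFLAGS=-fPIC CXXFLAGS=-fPIC FFLAGS=-fPIC
+ # followed by make and make install.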
diff --git a/var/spack/repos/builtin/packages/parallel/package.py b/var/spack/repos/builtin/packages/parallel/package.py
new file mode 100644
index 0000000000..81c0195651
--- /dev/null
+++ b/var/spack/repos/builtin/packages/parallel/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Parallel(Package):
+ """GNU parallel is a shell tool for executing jobs in parallel using
+ one or more computers. A job can be a single command or a small
+ script that has to be run for each of the lines in the input.
+ """
+
+ homepage = "http://www.gnu.org/software/parallel/"
+ url = "http://ftp.gnu.org/gnu/parallel/parallel-20160422.tar.bz2"
+
+ version('20160422', '24621f684130472694333709bd4454cb')
+ version('20160322', '4e81e0d36902ab4c4e969ee6f35e6e57')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/paraver/package.py b/var/spack/repos/builtin/packages/paraver/package.py
index 50ce6b79fb..0a2ffdbb84 100644
--- a/var/spack/repos/builtin/packages/paraver/package.py
+++ b/var/spack/repos/builtin/packages/paraver/package.py
@@ -25,18 +25,21 @@
from spack import *
import os
+
class Paraver(Package):
""""A very powerful performance visualization and analysis tool
based on traces that can be used to analyse any information that
is expressed on its input trace format. Traces for parallel MPI,
OpenMP and other programs can be genereated with Extrae."""
homepage = "http://www.bsc.es/computer-sciences/performance-tools/paraver"
- url = "http://www.bsc.es/ssl/apps/performanceTools/files/paraver-sources-4.5.3.tar.gz"
+ url = "http://www.bsc.es/ssl/apps/performanceTools/files/paraver-sources-4.6.2.tar.gz"
- version('4.5.3', '625de9ec0d639acd18d1aaa644b38f72')
+ # NOTE: Paraver only makes the latest version available for download,
+ # so don't keep or add older versions here.
+ version('4.6.2', 'c54e124382b597574628b00e31649803')
depends_on("boost")
- #depends_on("extrae")
+ # depends_on("extrae")
depends_on("wx")
depends_on("wxpropgrid")
@@ -47,8 +50,11 @@ class Paraver(Package):
make("install")
os.chdir("../paraver-kernel")
- #"--with-extrae=%s" % spec['extrae'].prefix,
- configure("--prefix=%s" % prefix, "--with-ptools-common-files=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization")
+ # "--with-extrae=%s" % spec['extrae'].prefix,
+ configure("--prefix=%s" % prefix,
+ "--with-ptools-common-files=%s" % prefix,
+ "--with-boost=%s" % spec['boost'].prefix,
+ "--with-boost-serialization=boost_serialization")
make()
make("install")
@@ -58,8 +64,11 @@ class Paraver(Package):
make("install")
os.chdir("../wxparaver")
- #"--with-extrae=%s" % spec['extrae'].prefix,
- configure("--prefix=%s" % prefix, "--with-paraver=%s" % prefix, "--with-boost=%s" % spec['boost'].prefix, "--with-boost-serialization=boost_serialization", "--with-wxdir=%s" % spec['wx'].prefix.bin)
+ # "--with-extrae=%s" % spec['extrae'].prefix,
+ configure("--prefix=%s" % prefix,
+ "--with-paraver=%s" % prefix,
+ "--with-boost=%s" % spec['boost'].prefix,
+ "--with-boost-serialization=boost_serialization",
+ "--with-wxdir=%s" % spec['wx'].prefix.bin)
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/paraview/package.py b/var/spack/repos/builtin/packages/paraview/package.py
index 711cbc98c9..252f58247f 100644
--- a/var/spack/repos/builtin/packages/paraview/package.py
+++ b/var/spack/repos/builtin/packages/paraview/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Paraview(Package):
homepage = 'http://www.paraview.org'
url = 'http://www.paraview.org/files/v5.0/ParaView-v'
@@ -36,38 +37,38 @@ class Paraview(Package):
variant('tcl', default=False, description='Enable TCL support')
- variant('mpi', default=False, description='Enable MPI support')
+ variant('mpi', default=True, description='Enable MPI support')
variant('osmesa', default=False, description='Enable OSMesa support')
variant('qt', default=False, description='Enable Qt support')
variant('opengl2', default=False, description='Enable OpenGL2 backend')
depends_on('python@2:2.7', when='+python')
- depends_on('py-numpy', when='+python')
- depends_on('py-matplotlib', when='+python')
+ depends_on('py-numpy', when='+python', type='run')
+ depends_on('py-matplotlib', when='+python', type='run')
depends_on('tcl', when='+tcl')
depends_on('mpi', when='+mpi')
depends_on('qt@:4', when='+qt')
+ depends_on('cmake', type='build')
depends_on('bzip2')
depends_on('freetype')
- #depends_on('hdf5+mpi', when='+mpi')
- #depends_on('hdf5~mpi', when='~mpi')
+ # depends_on('hdf5+mpi', when='+mpi')
+ # depends_on('hdf5~mpi', when='~mpi')
depends_on('jpeg')
depends_on('libpng')
depends_on('libtiff')
depends_on('libxml2')
- #depends_on('netcdf')
- #depends_on('netcdf-cxx')
- #depends_on('protobuf') # version mismatches?
- #depends_on('sqlite') # external version not supported
+ # depends_on('netcdf')
+ # depends_on('netcdf-cxx')
+ # depends_on('protobuf') # version mismatches?
+ # depends_on('sqlite') # external version not supported
depends_on('zlib')
def url_for_version(self, version):
"""Handle ParaView version-based custom URLs."""
return self._url_str % (version.up_to(2), version)
-
def install(self, spec, prefix):
with working_dir('spack-build', create=True):
def feature_to_bool(feature, on='ON', off='OFF'):
@@ -79,34 +80,46 @@ class Paraview(Package):
return feature_to_bool(feature, on='OFF', off='ON')
feature_args = std_cmake_args[:]
- feature_args.append('-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt'))
- feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' % feature_to_bool('+python'))
+ feature_args.append(
+ '-DPARAVIEW_BUILD_QT_GUI:BOOL=%s' % feature_to_bool('+qt'))
+ feature_args.append('-DPARAVIEW_ENABLE_PYTHON:BOOL=%s' %
+ feature_to_bool('+python'))
if '+python' in spec:
- feature_args.append('-DPYTHON_EXECUTABLE:FILEPATH=%s/bin/python' % spec['python'].prefix)
- feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' % feature_to_bool('+mpi'))
+ feature_args.append(
+ '-DPYTHON_EXECUTABLE:FILEPATH=%s/bin/python'
+ % spec['python'].prefix)
+ feature_args.append('-DPARAVIEW_USE_MPI:BOOL=%s' %
+ feature_to_bool('+mpi'))
if '+mpi' in spec:
- feature_args.append('-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix)
- feature_args.append('-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl'))
- feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' % feature_to_bool('+osmesa'))
- feature_args.append('-DVTK_USE_X:BOOL=%s' % nfeature_to_bool('+osmesa'))
- feature_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL'))
+ feature_args.append(
+ '-DMPIEXEC:FILEPATH=%s/bin/mpiexec' % spec['mpi'].prefix)
+ feature_args.append(
+ '-DVTK_ENABLE_TCL_WRAPPING:BOOL=%s' % feature_to_bool('+tcl'))
+ feature_args.append('-DVTK_OPENGL_HAS_OSMESA:BOOL=%s' %
+ feature_to_bool('+osmesa'))
+ feature_args.append('-DVTK_USE_X:BOOL=%s' %
+ nfeature_to_bool('+osmesa'))
+ feature_args.append(
+ '-DVTK_RENDERING_BACKEND:STRING=%s' %
+ feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL'))
feature_args.extend(std_cmake_args)
if 'darwin' in self.spec.architecture:
feature_args.append('-DVTK_USE_X:BOOL=OFF')
- feature_args.append('-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON')
+ feature_args.append(
+ '-DPARAVIEW_DO_UNIX_STYLE_INSTALLS:BOOL=ON')
cmake('..',
- '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix,
- '-DBUILD_TESTING:BOOL=OFF',
- '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON',
- '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF',
- '-DVTK_USE_SYSTEM_JPEG:BOOL=ON',
- '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON',
- '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF',
- '-DVTK_USE_SYSTEM_TIFF:BOOL=ON',
- '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON',
- *feature_args)
+ '-DCMAKE_INSTALL_PREFIX:PATH=%s' % prefix,
+ '-DBUILD_TESTING:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_FREETYPE:BOOL=ON',
+ '-DVTK_USE_SYSTEM_HDF5:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_JPEG:BOOL=ON',
+ '-DVTK_USE_SYSTEM_LIBXML2:BOOL=ON',
+ '-DVTK_USE_SYSTEM_NETCDF:BOOL=OFF',
+ '-DVTK_USE_SYSTEM_TIFF:BOOL=ON',
+ '-DVTK_USE_SYSTEM_ZLIB:BOOL=ON',
+ *feature_args)
make()
make('install')
diff --git a/var/spack/repos/builtin/packages/parmetis/package.py b/var/spack/repos/builtin/packages/parmetis/package.py
index 2dead4a76a..74e00cc9e0 100644
--- a/var/spack/repos/builtin/packages/parmetis/package.py
+++ b/var/spack/repos/builtin/packages/parmetis/package.py
@@ -26,64 +26,62 @@
from spack import *
import sys
+
class Parmetis(Package):
- """
- ParMETIS is an MPI-based parallel library that implements a variety of algorithms for partitioning unstructured
- graphs, meshes, and for computing fill-reducing orderings of sparse matrices.
- """
+ """ParMETIS is an MPI-based parallel library that implements a variety of
+ algorithms for partitioning unstructured graphs, meshes, and for
+ computing fill-reducing orderings of sparse matrices."""
+
homepage = 'http://glaros.dtc.umn.edu/gkhome/metis/parmetis/overview'
- url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis/parmetis-4.0.3.tar.gz'
+ base_url = 'http://glaros.dtc.umn.edu/gkhome/fetch/sw/parmetis'
version('4.0.3', 'f69c479586bf6bb7aff6a9bc0c739628')
+ version('4.0.2', '0912a953da5bb9b5e5e10542298ffdce')
- variant('shared', default=True, description='Enables the build of shared libraries')
- variant('debug', default=False, description='Builds the library in debug mode')
- variant('gdb', default=False, description='Enables gdb support')
+ variant('shared', default=True, description='Enables the build of shared libraries.')
+ variant('debug', default=False, description='Builds the library in debug mode.')
+ variant('gdb', default=False, description='Enables gdb support.')
- depends_on('cmake @2.8:') # build dependency
+ depends_on('cmake@2.8:', type='build')
depends_on('mpi')
-
- patch('enable_external_metis.patch')
depends_on('metis@5:')
+ patch('enable_external_metis.patch')
# bug fixes from PETSc developers
- # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/
+ # https://bitbucket.org/petsc/pkg-parmetis/commits/1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b/raw/ # NOQA: E501
patch('pkg-parmetis-1c1a9fd0f408dc4d42c57f5c3ee6ace411eb222b.patch')
- # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/
+ # https://bitbucket.org/petsc/pkg-parmetis/commits/82409d68aa1d6cbc70740d0f35024aae17f7d5cb/raw/ # NOQA: E501
patch('pkg-parmetis-82409d68aa1d6cbc70740d0f35024aae17f7d5cb.patch')
- depends_on('gdb', when='+gdb')
+ def url_for_version(self, version):
+ verdir = 'OLD/' if version < Version('3.2.0') else ''
+ return '%s/%sparmetis-%s.tar.gz' % (Parmetis.base_url, verdir, version)
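+ # For illustration (following the scheme above): 4.0.3 maps to
+ #   <base_url>/parmetis-4.0.3.tar.gz
+ # while a hypothetical pre-3.2.0 release, say 3.1.1, would map to
+ #   <base_url>/OLD/parmetis-3.1.1.tar.gz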
def install(self, spec, prefix):
- options = []
- options.extend(std_cmake_args)
-
- build_directory = join_path(self.stage.path, 'spack-build')
source_directory = self.stage.source_path
- metis_source = join_path(source_directory, 'metis')
+ build_directory = join_path(source_directory, 'build')
- # FIXME : Once a contract is defined, MPI compilers should be retrieved indirectly via spec['mpi'] in case
- # FIXME : they use a non-standard name
- options.extend(['-DGKLIB_PATH:PATH={metis_source}/GKlib'.format(metis_source=spec['metis'].prefix.include),
- '-DMETIS_PATH:PATH={metis_source}'.format(metis_source=spec['metis'].prefix),
- '-DCMAKE_C_COMPILER:STRING=mpicc',
- '-DCMAKE_CXX_COMPILER:STRING=mpicxx'])
+ options = std_cmake_args[:]
+ options.extend([
+ '-DGKLIB_PATH:PATH=%s/GKlib' % spec['metis'].prefix.include,
+ '-DMETIS_PATH:PATH=%s' % spec['metis'].prefix,
+ '-DCMAKE_C_COMPILER:STRING=%s' % spec['mpi'].mpicc,
+ '-DCMAKE_CXX_COMPILER:STRING=%s' % spec['mpi'].mpicxx
+ ])
if '+shared' in spec:
options.append('-DSHARED:BOOL=ON')
-
if '+debug' in spec:
options.extend(['-DDEBUG:BOOL=ON',
'-DCMAKE_BUILD_TYPE:STRING=Debug'])
-
if '+gdb' in spec:
options.append('-DGDB:BOOL=ON')
with working_dir(build_directory, create=True):
cmake(source_directory, *options)
make()
- make("install")
+ make('install')
- # The shared library is not installed correctly on Darwin; correct this
+ # The shared library is not installed correctly on Darwin; fix this
if (sys.platform == 'darwin') and ('+shared' in spec):
fix_darwin_install_name(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/parmgridgen/package.py b/var/spack/repos/builtin/packages/parmgridgen/package.py
new file mode 100644
index 0000000000..02be777c36
--- /dev/null
+++ b/var/spack/repos/builtin/packages/parmgridgen/package.py
@@ -0,0 +1,71 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Parmgridgen(Package):
+ """MGRIDGEN is a serial library written entirely in ANSI C that implements
+ (serial) algorithms for obtaining a sequence of successive coarse grids
+ that are well-suited for geometric multigrid methods.
+ ParMGridGen is the parallel version of MGridGen.
+ """
+
+ homepage = "http://www-users.cs.umn.edu/~moulitsa/software.html"
+ url = "http://www-users.cs.umn.edu/~moulitsa/download/ParMGridGen-1.0.tar.gz"
+
+ version('1.0', '2872fa95b7fb91d6bd525490eed62038')
+
+ depends_on('mpi')
+
+ def install(self, spec, prefix):
+ make_opts = [
+ 'make=make',
+ 'COPTIONS=-fPIC',
+ 'LDOPTIONS=-fPIC',
+ 'CC={0}'.format(self.compiler.cc),
+ 'PARCC={0}'.format(spec['mpi'].mpicc),
+ 'LD={0}'.format(self.compiler.cc),
+ 'PARLD={0}'.format(spec['mpi'].mpicc),
+ 'LIBDIR=-L../..',
+ 'PARLIBS=-L../../ -lparmgrid -lmgrid -lm',
+ 'LIBS=-L../../ -lmgrid -lm',
+ 'parallel'
+ ]
+
+ make(*make_opts, parallel=False)
+
+ mkdirp(prefix.include, prefix.lib, prefix.bin)
+
+ install("mgridgen.h", prefix.include)
+ install("parmgridgen.h", prefix.include)
+
+ install("MGridGen/IMlib/libIMlib.a",
+ join_path(prefix.lib, 'libIMlib.a'))
+ install("libmgrid.a", prefix.lib)
+ install("libparmgrid.a", prefix.lib)
+
+ install("mgridgen", prefix.bin)
+ install("parmgridgen", prefix.bin)
diff --git a/var/spack/repos/builtin/packages/parpack/package.py b/var/spack/repos/builtin/packages/parpack/package.py
index 5930dada85..84bc88b3b0 100644
--- a/var/spack/repos/builtin/packages/parpack/package.py
+++ b/var/spack/repos/builtin/packages/parpack/package.py
@@ -26,6 +26,7 @@ from spack import *
import os
import shutil
+
class Parpack(Package):
"""ARPACK is a collection of Fortran77 subroutines designed to solve large
scale eigenvalue problems."""
@@ -52,13 +53,13 @@ class Parpack(Package):
mf.filter('^PLAT.*', 'PLAT = ')
mf.filter('^home.*', 'home = %s' % os.getcwd())
mf.filter('^BLASdir.*', 'BLASdir = %s' % self.spec['blas'].prefix)
- mf.filter('^LAPACKdir.*', 'LAPACKdir = %s' % self.spec['lapack'].prefix)
+ mf.filter('^LAPACKdir.*', 'LAPACKdir = %s' %
+ self.spec['lapack'].prefix)
mf.filter('^MAKE.*', 'MAKE = make')
# build the library in our own prefix.
mf.filter('^ARPACKLIB.*', 'PARPACKLIB = %s/libparpack.a' % os.getcwd())
-
def install(self, spec, prefix):
with working_dir('PARPACK/SRC/MPI'):
make('all')
diff --git a/var/spack/repos/builtin/packages/patch/package.py b/var/spack/repos/builtin/packages/patch/package.py
new file mode 100644
index 0000000000..df890c057a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/patch/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Patch(AutotoolsPackage):
+ """Patch takes a patch file containing a difference listing produced by
+ the diff program and applies those differences to one or more
+ original files, producing patched versions.
+ """
+
+ homepage = "http://savannah.gnu.org/projects/patch/"
+ url = "http://ftp.gnu.org/gnu/patch/patch-2.7.5.tar.xz"
+
+ version('2.7.5', 'e3da7940431633fb65a01b91d3b7a27a')
diff --git a/var/spack/repos/builtin/packages/patchelf/package.py b/var/spack/repos/builtin/packages/patchelf/package.py
index 3860875bcc..c391f491eb 100644
--- a/var/spack/repos/builtin/packages/patchelf/package.py
+++ b/var/spack/repos/builtin/packages/patchelf/package.py
@@ -24,14 +24,18 @@
##############################################################################
from spack import *
+
class Patchelf(Package):
- """PatchELF is a small utility to modify the dynamic linker and RPATH of ELF executables."""
+ """PatchELF is a small utility to modify the dynamic linker and RPATH of
+ ELF executables."""
homepage = "https://nixos.org/patchelf.html"
- url = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz"
+ url = "http://nixos.org/releases/patchelf/patchelf-0.8/patchelf-0.8.tar.gz"
+
list_url = "http://nixos.org/releases/patchelf/"
list_depth = 2
+ version('0.9', '3c265508526760f233620f35d79c79fc')
version('0.8', '407b229e6a681ffb0e2cdd5915cb2d01')
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/pcre/intel.patch b/var/spack/repos/builtin/packages/pcre/intel.patch
new file mode 100644
index 0000000000..f160f55e1b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pcre/intel.patch
@@ -0,0 +1,12 @@
+diff -up pcre-8.38/pcrecpp.cc.intel pcre-8.38/pcrecpp.cc
+--- pcre-8.38/pcrecpp.cc.intel 2014-09-15 07:48:59.000000000 -0600
++++ pcre-8.38/pcrecpp.cc 2016-06-08 16:16:56.702721214 -0600
+@@ -66,7 +66,7 @@ Arg RE::no_arg((void*)NULL);
+ // inclusive test if we ever needed it. (Note that not only the
+ // __attribute__ syntax, but also __USER_LABEL_PREFIX__, are
+ // gnu-specific.)
+-#if defined(__GNUC__) && __GNUC__ >= 3 && defined(__ELF__)
++#if defined(__GNUC__) && __GNUC__ >= 3 && defined(__ELF__) && !defined(__INTEL_COMPILER)
+ # define ULP_AS_STRING(x) ULP_AS_STRING_INTERNAL(x)
+ # define ULP_AS_STRING_INTERNAL(x) #x
+ # define USER_LABEL_PREFIX_STR ULP_AS_STRING(__USER_LABEL_PREFIX__)
diff --git a/var/spack/repos/builtin/packages/pcre/package.py b/var/spack/repos/builtin/packages/pcre/package.py
index 7a9f3b911d..a2236e682b 100644
--- a/var/spack/repos/builtin/packages/pcre/package.py
+++ b/var/spack/repos/builtin/packages/pcre/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Pcre(Package):
"""The PCRE package contains Perl Compatible Regular Expression
libraries. These are useful for implementing regular expression
@@ -31,10 +32,21 @@ class Pcre(Package):
homepage = "http://www.pcre.org"""
url = "ftp://ftp.csx.cam.ac.uk/pub/software/programming/pcre/pcre-8.36.tar.bz2"
- version('8.36', 'b767bc9af0c20bc9c1fe403b0d41ad97')
+ version('8.39', 'e3fca7650a0556a2647821679d81f585')
version('8.38', '00aabbfe56d5a48b270f999b508c5ad2')
+ patch("intel.patch", when='@8.38')
+
+ variant('utf', default=True,
+ description='Enable support for UTF-8/16/32, '
+ 'incompatible with EBCDIC.')
+
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure_args = ['--prefix=%s' % prefix]
+ if '+utf' in spec:
+ configure_args.append('--enable-utf')
+ configure_args.append('--enable-unicode-properties')
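+ # e.g. with the default +utf variant this amounts to roughly
+ #   ./configure --prefix=<prefix> --enable-utf --enable-unicode-properties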
+
+ configure(*configure_args)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/pcre2/package.py b/var/spack/repos/builtin/packages/pcre2/package.py
index b013685f05..a2739e0584 100644
--- a/var/spack/repos/builtin/packages/pcre2/package.py
+++ b/var/spack/repos/builtin/packages/pcre2/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Pcre2(Package):
"""The PCRE2 package contains Perl Compatible Regular Expression
libraries. These are useful for implementing regular expression
diff --git a/var/spack/repos/builtin/packages/pdt/package.py b/var/spack/repos/builtin/packages/pdt/package.py
index 60136fc0cd..bed01aeefb 100644
--- a/var/spack/repos/builtin/packages/pdt/package.py
+++ b/var/spack/repos/builtin/packages/pdt/package.py
@@ -22,24 +22,29 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
class Pdt(Package):
- """
- Program Database Toolkit (PDT) is a framework for analyzing source code written in several programming languages
- and for making rich program knowledge accessible to developers of static and dynamic analysis tools. PDT implements
- a standard program representation, the program database (PDB), that can be accessed in a uniform way through a
- class library supporting common PDB operations.
+ """Program Database Toolkit (PDT) is a framework for analyzing source
+ code written in several programming languages and for making rich
+ program knowledge accessible to developers of static and dynamic
+ analysis tools. PDT implements a standard program representation,
+ the program database (PDB), that can be accessed in a uniform way
+ through a class library supporting common PDB operations.
+
"""
homepage = "https://www.cs.uoregon.edu/research/pdt/home.php"
- url = "https://www.cs.uoregon.edu/research/tau/pdt_releases/pdt-3.21.tar.gz"
+ url = "http://www.cs.uoregon.edu/research/paracomp/pdtoolkit/Download/pdtoolkit-3.22.1.tar.gz"
- version('3.21', '8df94298b71703decf680709a4ddf68f')
- version('3.19', 'ba5591994998771fdab216699e362228')
+ version('3.22.1', 'b56b9b3e621161c7fd9e4908b944840d')
+ version('3.22', '982d667617802962a1f7fe6c4c31184f')
+ version('3.21', '3092ca0d8833b69992c17e63ae66c263')
+ version('3.20', 'c3edabe202926abe04552e33cd39672d')
+ version('3.19', '5c5e1e6607086aa13bf4b1b9befc5864')
+ version('3.18.1', 'e401534f5c476c3e77f05b7f73b6c4f2')
def install(self, spec, prefix):
configure('-prefix=%s' % prefix)
make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/perl/package.py b/var/spack/repos/builtin/packages/perl/package.py
new file mode 100644
index 0000000000..d71a7492ba
--- /dev/null
+++ b/var/spack/repos/builtin/packages/perl/package.py
@@ -0,0 +1,75 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+#
+# Author: George Hartzell <hartzell@alerce.com>
+# Date: July 21, 2016
+# Author: Justin Too <justin@doubleotoo.com>
+# Date: September 6, 2015
+#
+from spack import *
+
+
+class Perl(Package):
+ """Perl 5 is a highly capable, feature-rich programming language with over
+ 27 years of development."""
+ homepage = "http://www.perl.org"
+ url = "http://www.cpan.org/src/5.0/perl-5.22.2.tar.gz"
+
+ version('5.24.0', 'c5bf7f3285439a2d3b6a488e14503701')
+ version('5.22.2', '5767e2a10dd62a46d7b57f74a90d952b')
+ version('5.20.3', 'd647d0ea5a7a8194c34759ab9f2610cd')
+ # 5.18.4 fails with gcc-5
+ # https://rt.perl.org/Public/Bug/Display.html?id=123784
+ # version('5.18.4' , '1f9334ff730adc05acd3dd7130d295db')
+
+ # Installing cpanm alongside the core makes it safe and simple for
+ # people/projects to install their own sets of perl modules. Not
+ # having it in core increases the "energy of activation" for doing
+ # things cleanly.
+ variant('cpanm', default=True,
+ description='Optionally install cpanm with the core packages.')
+
+ resource(
+ name="cpanm",
+ url="http://search.cpan.org/CPAN/authors/id/M/MI/MIYAGAWA/App-cpanminus-1.7042.tar.gz",
+ md5="e87f55fbcb3c13a4754500c18e89219f",
+ destination="cpanm",
+ placement="cpanm"
+ )
+
+ def install(self, spec, prefix):
+ configure = Executable('./Configure')
+ configure("-des", "-Dprefix=" + prefix)
+ make()
+ if self.run_tests:
+ make("test")
+ make("install")
+
+ if '+cpanm' in spec:
+ with working_dir(join_path('cpanm', 'cpanm')):
+ perl = Executable(join_path(prefix.bin, 'perl'))
+ perl('Makefile.PL')
+ make()
+ make('install')
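(Illustration only, not part of the diff; paths approximate.) The resource() directive above stages App-cpanminus inside the Perl build tree, which is what the working_dir(join_path('cpanm', 'cpanm')) in install() relies on:

    # <stage-source-path>/              expanded perl tarball
    # <stage-source-path>/cpanm/cpanm/  App-cpanminus, per destination= and placement=
    # toggle the extra client on the command line:
    #   spack install perl+cpanm        # default
    #   spack install perl~cpanm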
diff --git a/var/spack/repos/builtin/packages/petsc/package.py b/var/spack/repos/builtin/packages/petsc/package.py
index 6456a1aabf..a9d4ff6065 100644
--- a/var/spack/repos/builtin/packages/petsc/package.py
+++ b/var/spack/repos/builtin/packages/petsc/package.py
@@ -22,75 +22,112 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+
import os
+import sys
from spack import *
class Petsc(Package):
- """
- PETSc is a suite of data structures and routines for the scalable (parallel) solution of scientific applications
- modeled by partial differential equations.
+ """PETSc is a suite of data structures and routines for the scalable
+ (parallel) solution of scientific applications modeled by partial
+ differential equations.
"""
homepage = "http://www.mcs.anl.gov/petsc/index.html"
url = "http://ftp.mcs.anl.gov/pub/petsc/release-snapshots/petsc-3.5.3.tar.gz"
+ version('develop', git='https://bitbucket.org/petsc/petsc.git', tag='master')
+ version('for-pflotran-0.1.0', git='https://bitbucket.org/petsc/petsc.git',
+ commit='7943f4e1472fff9cf1fc630a1100136616e4970f')
+
+ version('3.7.5', 'f00f6e6a3bac39052350dd47194b58a3')
+ version('3.7.4', 'aaf94fa54ef83022c14091f10866eedf')
+ version('3.7.2', '50da49867ce7a49e7a0c1b37f4ec7b34')
+ version('3.6.4', '7632da2375a3df35b8891c9526dbdde7')
version('3.6.3', '91dd3522de5a5ef039ff8f50800db606')
version('3.5.3', 'd4fd2734661e89f18ac6014b5dd1ef2f')
version('3.5.2', 'ad170802b3b058b5deb9cd1f968e7e13')
version('3.5.1', 'a557e029711ebf425544e117ffa44d8f')
version('3.4.4', '7edbc68aa6d8d6a3295dd5f6c2f6979d')
- variant('shared', default=True, description='Enables the build of shared libraries')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
variant('mpi', default=True, description='Activates MPI support')
- variant('double', default=True, description='Switches between single and double precision')
+ variant('double', default=True,
+ description='Switches between single and double precision')
variant('complex', default=False, description='Build with complex numbers')
variant('debug', default=False, description='Compile in debug mode')
- variant('metis', default=True, description='Activates support for metis and parmetis')
- variant('hdf5', default=True, description='Activates support for HDF5 (only parallel)')
+ variant('metis', default=True,
+ description='Activates support for metis and parmetis')
+ variant('hdf5', default=True,
+ description='Activates support for HDF5 (only parallel)')
variant('boost', default=True, description='Activates support for Boost')
- variant('hypre', default=True, description='Activates support for Hypre (only parallel)')
- variant('mumps', default=True, description='Activates support for MUMPS (only parallel)')
- variant('superlu-dist', default=True, description='Activates support for SuperluDist (only parallel)')
+ variant('hypre', default=True,
+ description='Activates support for Hypre (only parallel)')
+ variant('mumps', default=True,
+ description='Activates support for MUMPS (only parallel'
+ ' and 32bit indices)')
+ variant('superlu-dist', default=True,
+ description='Activates support for SuperluDist (only parallel)')
+ variant('int64', default=False,
+ description='Compile with 64bit indices')
# Virtual dependencies
+ # Git repository needs sowing to build Fortran interface
+ depends_on('sowing', when='@develop')
+
+ # PETSc, hypre, superlu_dist when built with int64 use 32 bit integers
+ # with BLAS/LAPACK
depends_on('blas')
depends_on('lapack')
depends_on('mpi', when='+mpi')
# Build dependencies
- depends_on('python @2.6:2.7')
+ depends_on('python @2.6:2.7', type='build')
# Other dependencies
- depends_on('boost', when='+boost')
- depends_on('metis@5:', when='+metis')
+ depends_on('boost', when='@:3.5+boost')
+ depends_on('metis@5:~int64', when='+metis~int64')
+ depends_on('metis@5:+int64', when='+metis+int64')
depends_on('hdf5+mpi', when='+hdf5+mpi')
depends_on('parmetis', when='+metis+mpi')
# Hypre does not support complex numbers.
- # Also PETSc prefer to build it without internal superlu, likely due to conflict in headers
- # see https://bitbucket.org/petsc/petsc/src/90564b43f6b05485163c147b464b5d6d28cde3ef/config/BuildSystem/config/packages/hypre.py
- depends_on('hypre~internal-superlu', when='+hypre+mpi~complex')
- depends_on('superlu-dist', when='+superlu-dist+mpi')
- depends_on('mumps+mpi', when='+mumps+mpi')
- depends_on('scalapack', when='+mumps+mpi')
+ # Also PETSc prefers to build it without internal superlu, likely due to
+ # a conflict in headers; see
+ # https://bitbucket.org/petsc/petsc/src/90564b43f6b05485163c147b464b5d6d28cde3ef/config/BuildSystem/config/packages/hypre.py
+ depends_on('hypre~internal-superlu~int64', when='+hypre+mpi~complex~int64')
+ depends_on('hypre~internal-superlu+int64', when='+hypre+mpi~complex+int64')
+ depends_on('superlu-dist@:4.3~int64', when='@3.4.4:3.6.4+superlu-dist+mpi~int64')
+ depends_on('superlu-dist@:4.3+int64', when='@3.4.4:3.6.4+superlu-dist+mpi+int64')
+ depends_on('superlu-dist@5.0.0:~int64', when='@3.7:+superlu-dist+mpi~int64')
+ depends_on('superlu-dist@5.0.0:+int64', when='@3.7:+superlu-dist+mpi+int64')
+ depends_on('superlu-dist@5.0.0:~int64', when='@for-pflotran-0.1.0+superlu-dist+mpi~int64')
+ depends_on('superlu-dist@5.0.0:+int64', when='@for-pflotran-0.1.0+superlu-dist+mpi+int64')
+ depends_on('mumps+mpi', when='+mumps+mpi~int64')
+ depends_on('scalapack', when='+mumps+mpi~int64')
def mpi_dependent_options(self):
if '~mpi' in self.spec:
compiler_opts = [
'--with-cc=%s' % os.environ['CC'],
- '--with-cxx=%s' % (os.environ['CXX'] if self.compiler.cxx is not None else '0'),
- '--with-fc=%s' % (os.environ['FC'] if self.compiler.fc is not None else '0'),
+ '--with-cxx=%s' % (os.environ['CXX']
+ if self.compiler.cxx is not None else '0'),
+ '--with-fc=%s' % (os.environ['FC']
+ if self.compiler.fc is not None else '0'),
'--with-mpi=0'
]
- error_message_fmt = '\t{library} support requires "+mpi" to be activated'
-
- # If mpi is disabled (~mpi), it's an error to have any of these enabled.
- # This generates a list of any such errors.
- errors = [error_message_fmt.format(library=x)
- for x in ('hdf5', 'hypre', 'parmetis','mumps','superlu-dist')
- if ('+'+x) in self.spec]
+ error_message_fmt = \
+ '\t{library} support requires "+mpi" to be activated'
+
+ # If mpi is disabled (~mpi), it's an error to have any of these
+ # enabled. This generates a list of any such errors.
+ errors = [
+ error_message_fmt.format(library=x)
+ for x in ('hdf5', 'hypre', 'parmetis', 'mumps', 'superlu-dist')
+ if ('+' + x) in self.spec]
if errors:
errors = ['incompatible variants given'] + errors
raise RuntimeError('\n'.join(errors))
@@ -99,32 +136,57 @@ class Petsc(Package):
'--with-mpi=1',
'--with-mpi-dir=%s' % self.spec['mpi'].prefix,
]
+ if sys.platform != "darwin":
+ compiler_opts.extend([
+ '--with-cpp=cpp',
+ '--with-cxxcpp=cpp',
+ ])
return compiler_opts
def install(self, spec, prefix):
- options = ['--with-ssl=0']
+ options = ['--with-ssl=0',
+ '--with-x=0',
+ '--download-c2html=0',
+ '--download-sowing=0',
+ '--download-hwloc=0']
options.extend(self.mpi_dependent_options())
options.extend([
- '--with-precision=%s' % ('double' if '+double' in spec else 'single'),
- '--with-scalar-type=%s' % ('complex' if '+complex' in spec else 'real'),
+ '--with-precision=%s' % (
+ 'double' if '+double' in spec else 'single'),
+ '--with-scalar-type=%s' % (
+ 'complex' if '+complex' in spec else 'real'),
'--with-shared-libraries=%s' % ('1' if '+shared' in spec else '0'),
'--with-debugging=%s' % ('1' if '+debug' in spec else '0'),
- '--with-blas-lapack-dir=%s' % spec['lapack'].prefix
+ '--with-64-bit-indices=%s' % ('1' if '+int64' in spec else '0')
+ ])
+ # Make sure we use exactly the same Blas/Lapack libraries
+ # across the DAG. To that end list them explicitly
+ lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
+ options.extend([
+ '--with-blas-lapack-lib=%s' % lapack_blas.joined()
])
+
# Activates library support if needed
- for library in ('metis', 'boost', 'hdf5', 'hypre', 'parmetis','mumps','scalapack'):
+ for library in ('metis', 'boost', 'hdf5', 'hypre', 'parmetis',
+ 'mumps', 'scalapack'):
options.append(
- '--with-{library}={value}'.format(library=library, value=('1' if library in spec else '0'))
+ '--with-{library}={value}'.format(
+ library=library, value=('1' if library in spec else '0'))
)
if library in spec:
options.append(
- '--with-{library}-dir={path}'.format(library=library, path=spec[library].prefix)
+ '--with-{library}-dir={path}'.format(
+ library=library, path=spec[library].prefix)
)
- # PETSc does not pick up SuperluDist from the dir as they look for superlu_dist_4.1.a
+ # PETSc does not pick up SuperluDist from the dir as they look for
+ # superlu_dist_4.1.a
if 'superlu-dist' in spec:
options.extend([
- '--with-superlu_dist-include=%s' % spec['superlu-dist'].prefix.include,
- '--with-superlu_dist-lib=%s' % join_path(spec['superlu-dist'].prefix.lib, 'libsuperlu_dist.a'),
+ '--with-superlu_dist-include=%s' %
+ spec['superlu-dist'].prefix.include,
+ '--with-superlu_dist-lib=%s' %
+ join_path(spec['superlu-dist'].prefix.lib,
+ 'libsuperlu_dist.a'),
'--with-superlu_dist=1'
])
else:
@@ -138,6 +200,46 @@ class Petsc(Package):
make('MAKE_NP=%s' % make_jobs, parallel=False)
make("install")
+ # solve Poisson equation in 2D to make sure nothing is broken:
+ if ('mpi' in spec) and self.run_tests:
+ with working_dir('src/ksp/ksp/examples/tutorials'):
+ env['PETSC_DIR'] = self.prefix
+ cc = Executable(spec['mpi'].mpicc)
+ cc('ex50.c', '-I%s' % prefix.include, '-L%s' % prefix.lib,
+ '-lpetsc', '-lm', '-o', 'ex50')
+ run = Executable(join_path(spec['mpi'].prefix.bin, 'mpirun'))
+ run('ex50', '-da_grid_x', '4', '-da_grid_y', '4')
+ if 'superlu-dist' in spec:
+ run('ex50',
+ '-da_grid_x', '4',
+ '-da_grid_y', '4',
+ '-pc_type', 'lu',
+ '-pc_factor_mat_solver_package', 'superlu_dist')
+
+ if 'mumps' in spec:
+ run('ex50',
+ '-da_grid_x', '4',
+ '-da_grid_y', '4',
+ '-pc_type', 'lu',
+ '-pc_factor_mat_solver_package', 'mumps')
+
+ if 'hypre' in spec:
+ run('ex50',
+ '-da_grid_x', '4',
+ '-da_grid_y', '4',
+ '-pc_type', 'hypre',
+ '-pc_hypre_type', 'boomeramg')
+
+ def setup_environment(self, spack_env, run_env):
+ # configure fails if these env vars are set outside of Spack
+ spack_env.unset('PETSC_DIR')
+ spack_env.unset('PETSC_ARCH')
+
+ # Set PETSC_DIR in the module file
+ run_env.set('PETSC_DIR', self.prefix)
+ run_env.unset('PETSC_ARCH')
+
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
- # set up PETSC_DIR for everyone using PETSc package
+ # Set up PETSC_DIR for everyone using PETSc package
spack_env.set('PETSC_DIR', self.prefix)
+ spack_env.unset('PETSC_ARCH')
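(Aside, not part of the diff; paths hypothetical.) The explicit BLAS/LAPACK list assembled above is handed to configure as a single space-joined value, e.g. with netlib providers:

    # lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
    # lapack_blas.joined()
    #   -> '/opt/netlib-lapack/lib/liblapack.so /opt/netlib-lapack/lib/libblas.so'
    # giving:
    #   --with-blas-lapack-lib=/opt/netlib-lapack/lib/liblapack.so /opt/netlib-lapack/lib/libblas.so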
diff --git a/var/spack/repos/builtin/packages/pexsi/make.inc b/var/spack/repos/builtin/packages/pexsi/make.inc
new file mode 100644
index 0000000000..a8020fb370
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pexsi/make.inc
@@ -0,0 +1,79 @@
+# Different compiling and linking options.
+SUFFIX = linux
+
+# Compiler and tools
+################################################################
+CC = @MPICC
+CXX = @MPICXX
+FC = @MPIFC
+LOADER = @MPICXX
+
+
+AR = ar
+ARFLAGS = rvcu
+# For System V based machine without ranlib, like Cray and SGI,
+# use touch instead.
+RANLIB = @RANLIB
+
+CP = cp
+RM = rm
+RMFLAGS = -f
+################################################################
+
+# PEXSI directory
+PEXSI_DIR = @PEXSI_STAGE
+
+# Required libraries directories
+DSUPERLU_DIR = @SUPERLU_PREFIX
+METIS_DIR = @METIS_PREFIX
+PARMETIS_DIR = @PARMETIS_PREFIX
+LAPACK_DIR = @LAPACK_PREFIX
+BLAS_DIR = @BLAS_PREFIX
+
+# Includes
+PEXSI_INCLUDE = -I${PEXSI_DIR}/include
+DSUPERLU_INCLUDE = -I${DSUPERLU_DIR}/include
+INCLUDES = ${PEXSI_INCLUDE} ${DSUPERLU_INCLUDE}
+
+# Libraries
+CPP_LIB = @STDCXX_LIB @MPICXX_LIB
+#GFORTRAN_LIB = /usr/lib/gcc/x86_64-linux-gnu/4.8/libgfortran.a
+LAPACK_LIB = @LAPACK_LIBS
+BLAS_LIB = @BLAS_LIBS
+DSUPERLU_LIB = ${DSUPERLU_DIR}/lib/libsuperlu_dist.a
+PEXSI_LIB = ${PEXSI_DIR}/src/libpexsi_${SUFFIX}.a
+
+# Graph partitioning libraries
+METIS_LIB = -L${METIS_DIR}/lib -lmetis
+PARMETIS_LIB = -L${PARMETIS_DIR}/libparmetis -lparmetis
+
+# Different compiling and linking options.
+COMPILE_DEF = -DDEBUG=0 -DRELEASE
+COMPILE_FLAG = -O3 -w
+
+LIBS = ${PEXSI_LIB} ${DSUPERLU_LIB} ${PARMETIS_LIB} ${METIS_LIB} ${LAPACK_LIB} ${BLAS_LIB} ${GFORTRAN_LIB}
+
+COMPILE_DEF += -DAdd_
+
+CPPFLAG = -std=c++11
+
+CFLAGS = ${COMPILE_FLAG} ${PROFILE_FLAG} ${INCLUDES}
+FFLAGS = ${COMPILE_FLAG} ${PROFILE_FLAG} ${INCLUDES}
+CXXFLAGS = ${COMPILE_FLAG} ${CPPFLAG} ${PROFILE_FLAG} ${INCLUDES}
+CCDEFS = ${COMPILE_DEF}
+CPPDEFS = ${COMPILE_DEF}
+LOADOPTS = ${PROFILE_FLAG} ${LIBS}
+FLOADOPTS = ${PROFILE_FLAG} ${LIBS} ${CPP_LIB}
+
+# Generate auto-dependencies
+%.d: %.c
+ @set -e; rm -f $@; \
+ $(CC) -M $(CCDEFS) $(CFLAGS) $< > $@.$$$$; \
+ sed 's,\($*\)\.o[ :]*,\1.o $@ : ,g' < $@.$$$$ > $@;\
+ rm -f $@.$$$$
+
+%.d: %.cpp
+ @set -e; rm -f $@; \
+ $(CXX) -M $(CPPDEFS) $(CXXFLAGS) $< > $@.$$$$; \
+ sed 's,\($*\)\.o[ :]*,\1.o $@ : ,g' < $@.$$$$ > $@;\
+ rm -f $@.$$$$
diff --git a/var/spack/repos/builtin/packages/pexsi/package.py b/var/spack/repos/builtin/packages/pexsi/package.py
new file mode 100644
index 0000000000..9fc71d4c52
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pexsi/package.py
@@ -0,0 +1,103 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+import inspect
+import os.path
+import shutil
+
+from spack import *
+
+
+class Pexsi(Package):
+ """The PEXSI library is written in C++, and uses message passing interface
+ (MPI) to parallelize the computation on distributed memory computing
+ systems and achieve scalability on more than 10,000 processors.
+
+ The Pole EXpansion and Selected Inversion (PEXSI) method is a fast
+ method for electronic structure calculation based on Kohn-Sham density
+ functional theory. It efficiently evaluates certain selected elements
+ of matrix functions, e.g., the Fermi-Dirac function of the KS Hamiltonian,
+ which yields a density matrix. It can be used as an alternative to
+ diagonalization methods for obtaining the density, energy and forces
+ in electronic structure calculations.
+ """
+ homepage = 'https://math.berkeley.edu/~linlin/pexsi/index.html'
+ url = 'https://math.berkeley.edu/~linlin/pexsi/download/pexsi_v0.9.0.tar.gz'
+
+ version('0.9.0', '0c1a2de891ba1445dfc184b2fa270ed8')
+
+ depends_on('parmetis')
+ depends_on('superlu-dist@3.3', when='@0.9.0')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+
+ substitutions = {
+ '@MPICC': self.spec['mpi'].mpicc,
+ '@MPICXX': self.spec['mpi'].mpicxx,
+ '@MPIFC': self.spec['mpi'].mpifc,
+ '@MPICXX_LIB': ' '.join(self.spec['mpi'].mpicxx_shared_libs),
+ '@RANLIB': 'ranlib',
+ '@PEXSI_STAGE': self.stage.source_path,
+ '@SUPERLU_PREFIX': self.spec['superlu-dist'].prefix,
+ '@METIS_PREFIX': self.spec['metis'].prefix,
+ '@PARMETIS_PREFIX': self.spec['parmetis'].prefix,
+ '@LAPACK_PREFIX': self.spec['lapack'].prefix,
+ '@BLAS_PREFIX': self.spec['blas'].prefix,
+ '@LAPACK_LIBS': self.spec['lapack'].lapack_libs.joined(),
+ '@BLAS_LIBS': self.spec['lapack'].blas_libs.joined(),
+ '@STDCXX_LIB': ' '.join(self.compiler.stdcxx_libs)
+ }
+
+ template = join_path(
+ os.path.dirname(inspect.getmodule(self).__file__),
+ 'make.inc'
+ )
+ makefile = join_path(
+ self.stage.source_path,
+ 'make.inc'
+ )
+ shutil.copy(template, makefile)
+ for key, value in substitutions.items():
+ filter_file(key, value, makefile)
+
+ make()
+ # 'make install' does not exist, despite what documentation says
+ mkdirp(self.prefix.lib)
+ install(
+ join_path(self.stage.source_path, 'src', 'libpexsi_linux.a'),
+ join_path(self.prefix.lib, 'libpexsi.a')
+ )
+ install_tree(
+ join_path(self.stage.source_path, 'include'),
+ self.prefix.include
+ )
+ # fortran "interface"
+ make('-C', 'fortran')
+ install_tree(
+ join_path(self.stage.source_path, 'fortran'),
+ join_path(self.prefix, 'fortran')
+ )
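(Sketch with illustrative values, not part of the diff.) The filter_file() loop above rewrites the @TOKEN placeholders in the bundled make.inc in place, for example:

    # before (template)            after substitution
    # CC         = @MPICC          CC         = /opt/mpich/bin/mpicc
    # RANLIB     = @RANLIB         RANLIB     = ranlib
    # PEXSI_DIR  = @PEXSI_STAGE    PEXSI_DIR  = <stage source path>
    # LAPACK_LIB = @LAPACK_LIBS    LAPACK_LIB = /opt/openblas/lib/libopenblas.so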
diff --git a/var/spack/repos/builtin/packages/pfft/package.py b/var/spack/repos/builtin/packages/pfft/package.py
new file mode 100644
index 0000000000..575f0af3c5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pfft/package.py
@@ -0,0 +1,64 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Pfft(AutotoolsPackage):
+ """PFFT is a software library for computing massively parallel,
+ fast Fourier transformations on distributed memory architectures.
+ PFFT can be understood as a generalization of FFTW-MPI to
+ multidimensional data decomposition."""
+
+ homepage = "https://www-user.tu-chemnitz.de/~potts/workgroup/pippig/software.php.en"
+ url = "https://www-user.tu-chemnitz.de/~potts/workgroup/pippig/software/pfft-1.0.8-alpha.tar.gz"
+
+ version('1.0.8-alpha', '46457fbe8e38d02ff87d439b63dc0709')
+
+ depends_on('fftw+mpi+pfft_patches')
+ depends_on('mpi')
+
+ def install(self, spec, prefix):
+ options = ['--prefix={0}'.format(prefix)]
+ if not self.compiler.f77 or not self.compiler.fc:
+ options.append("--disable-fortran")
+
+ configure(*options)
+ make()
+ if self.run_tests:
+ make("check")
+ make("install")
+
+ if '+float' in spec['fftw']:
+ configure('--enable-float', *options)
+ make()
+ if self.run_tests:
+ make("check")
+ make("install")
+ if '+long_double' in spec['fftw']:
+ configure('--enable-long-double', *options)
+ make()
+ if self.run_tests:
+ make("check")
+ make("install")
diff --git a/var/spack/repos/builtin/packages/pflotran/package.py b/var/spack/repos/builtin/packages/pflotran/package.py
new file mode 100644
index 0000000000..347f792453
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pflotran/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Pflotran(AutotoolsPackage):
+ """PFLOTRAN is an open source, state-of-the-art massively parallel
+ subsurface flow and reactive transport code.
+ """
+
+ homepage = "http://www.pflotran.org"
+
+ version('develop', hg='https://bitbucket.org/pflotran/pflotran-xsdk')
+ version('0.1.0', hg='https://bitbucket.org/pflotran/pflotran-xsdk',
+ commit='4734cf5e606b')
+
+ depends_on('mpi')
+ depends_on('hdf5@1.8.12+mpi+fortran')
+ depends_on('petsc@develop+hdf5+metis', when='@develop')
+ depends_on('petsc@for-pflotran-0.1.0+hdf5+metis', when='@0.1.0')
+
+ parallel = False
diff --git a/var/spack/repos/builtin/packages/pgi/package.py b/var/spack/repos/builtin/packages/pgi/package.py
index 7170c65303..e8a2f53497 100644
--- a/var/spack/repos/builtin/packages/pgi/package.py
+++ b/var/spack/repos/builtin/packages/pgi/package.py
@@ -36,12 +36,13 @@ class Pgi(Package):
architecture) to the format: pgi-<version>.tar.gz. Spack will search your
current directory for a file of this format. Alternatively, add this
file to a mirror so that Spack can find it. For instructions on how to
- set up a mirror, see http://software.llnl.gov/spack/mirrors.html"""
+ set up a mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
homepage = "http://www.pgroup.com/"
- url = "file://%s/pgi-16.3.tar.gz" % os.getcwd()
+ version('16.5', 'a40e8852071b5d600cb42f31631b3de1')
version('16.3', '618cb7ddbc57d4e4ed1f21a0ab25f427')
+ version('15.7', '84a689217b17cdaf78c39270c70bea5d')
variant('network', default=True,
description="Perform a network install")
@@ -63,6 +64,10 @@ class Pgi(Package):
license_vars = ['PGROUPD_LICENSE_FILE', 'LM_LICENSE_FILE']
license_url = 'http://www.pgroup.com/doc/pgiinstall.pdf'
+ def url_for_version(self, version):
+ return "file://{0}/pgilinux-20{1}-{2}-x86_64.tar.gz".format(
+ os.getcwd(), version.up_to(1), version.joined)
+
def install(self, spec, prefix):
# Enable the silent installation feature
os.environ['PGI_SILENT'] = "true"
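(Worked example, not part of the diff.) The new url_for_version() expects the manually downloaded installer to sit in the directory where Spack is invoked, named after the release year and the joined version digits:

    # version('16.5', ...) -> file://<cwd>/pgilinux-2016-165-x86_64.tar.gz
    # version('15.7', ...) -> file://<cwd>/pgilinux-2015-157-x86_64.tar.gz
    # since version.up_to(1) gives '16' / '15' and version.joined gives '165' / '157'.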
diff --git a/var/spack/repos/builtin/packages/pidx/package.py b/var/spack/repos/builtin/packages/pidx/package.py
index d38dcd7b96..e19bb9e470 100644
--- a/var/spack/repos/builtin/packages/pidx/package.py
+++ b/var/spack/repos/builtin/packages/pidx/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Pidx(Package):
"""PIDX Parallel I/O Library.
@@ -36,6 +37,7 @@ class Pidx(Package):
version('1.0', git='https://github.com/sci-visus/PIDX.git',
commit='6afa1cf71d1c41263296dc049c8fabaf73c296da')
+ depends_on('cmake', type='build')
depends_on("mpi")
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/piranha/package.py b/var/spack/repos/builtin/packages/piranha/package.py
new file mode 100644
index 0000000000..dbf949f000
--- /dev/null
+++ b/var/spack/repos/builtin/packages/piranha/package.py
@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Piranha(Package):
+ """Piranha is a computer-algebra library for the symbolic manipulation of
+ sparse multivariate polynomials and other closely-related symbolic objects
+ (such as Poisson series)."""
+
+ homepage = "https://bluescarni.github.io/piranha/sphinx/"
+ url = "https://github.com/bluescarni/piranha/archive/v0.5.tar.gz"
+
+ version('0.5', '99546bae2be115737b6316751eb0b84d')
+ version('develop', git='https://github.com/bluescarni/piranha.git')
+
+ variant('python', default=True,
+ description='Build the Python bindings')
+
+ # Build dependencies
+ depends_on('cmake@3.0:', type='build')
+ extends('python', when='+python')
+ depends_on('python@2.6:', type='build', when='+python')
+
+ # Other dependencies
+ depends_on('boost+iostreams+regex+serialization',
+ when='~python')
+ depends_on('boost+iostreams+regex+serialization+python',
+ when='+python')
+ depends_on('bzip2')
+ depends_on('gmp') # mpir is a drop-in replacement for this
+ depends_on('mpfr') # Could also be built against mpir
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+
+ # Python bindings
+ options.extend([
+ '-DBUILD_PYRANHA=%s' % (
+ 'ON' if '+python' in spec else 'OFF'),
+ '-DBUILD_TESTS:BOOL=ON',
+ ])
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
+
+ make()
+ make('install')
+ if self.run_tests:
+ make('test')
diff --git a/var/spack/repos/builtin/packages/pixman/package.py b/var/spack/repos/builtin/packages/pixman/package.py
index 34d8dfea0d..c780fd64aa 100644
--- a/var/spack/repos/builtin/packages/pixman/package.py
+++ b/var/spack/repos/builtin/packages/pixman/package.py
@@ -23,20 +23,32 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import sys
+
class Pixman(Package):
"""The Pixman package contains a library that provides low-level
- pixel manipulation features such as image compositing and
- trapezoid rasterization."""
+ pixel manipulation features such as image compositing and
+ trapezoid rasterization."""
+
homepage = "http://www.pixman.org"
url = "http://cairographics.org/releases/pixman-0.32.6.tar.gz"
+ version('0.34.0', 'e80ebae4da01e77f68744319f01d52a3')
version('0.32.6', '3a30859719a41bd0f5cccffbfefdd4c2')
- depends_on("libpng")
+ depends_on('pkg-config', type='build')
+ depends_on('libpng')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix,
- "--disable-gtk")
+ config_args = ["--prefix=" + prefix,
+ "--disable-gtk"]
+
+ if sys.platform == "darwin":
+ config_args.append("--disable-mmx")
+
+ configure(*config_args)
+
make()
- make("install")
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch b/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch
new file mode 100644
index 0000000000..9538f23875
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pkg-config/g_date_strftime.patch
@@ -0,0 +1,34 @@
+From 00148329967adb196138372771052a3f606a6ea3 Mon Sep 17 00:00:00 2001
+From: coypu <coypu@sdf.org>
+Date: Wed, 2 Mar 2016 19:43:10 +0200
+Subject: [PATCH 2/2] gdate: Suppress string format literal warning
+
+Newer versions of GCC emit an error here, but we know it's safe.
+https://bugzilla.gnome.org/761550
+---
+ glib/glib/gdate.c | 5 +++++
+ 1 file changed, 5 insertions(+)
+
+diff --git a/glib/glib/gdate.c b/glib/glib/gdate.c
+index 4aece02..92c34d2 100644
+--- a/glib/glib/gdate.c
++++ b/glib/glib/gdate.c
+@@ -2439,6 +2439,9 @@ win32_strftime_helper (const GDate *d,
+ *
+ * Returns: number of characters written to the buffer, or 0 the buffer was too small
+ */
++#pragma GCC diagnostic push
++#pragma GCC diagnostic ignored "-Wformat-nonliteral"
++
+ gsize
+ g_date_strftime (gchar *s,
+ gsize slen,
+@@ -2549,3 +2552,5 @@ g_date_strftime (gchar *s,
+ return retval;
+ #endif
+ }
++
++#pragma GCC diagnostic pop
+--
+2.7.1
+
diff --git a/var/spack/repos/builtin/packages/pkg-config/package.py b/var/spack/repos/builtin/packages/pkg-config/package.py
index ddbc151767..a98f65fb07 100644
--- a/var/spack/repos/builtin/packages/pkg-config/package.py
+++ b/var/spack/repos/builtin/packages/pkg-config/package.py
@@ -24,23 +24,33 @@
##############################################################################
from spack import *
+
class PkgConfig(Package):
- """pkg-config is a helper tool used when compiling applications and libraries"""
+ """pkg-config is a helper tool used when compiling applications
+ and libraries"""
+
homepage = "http://www.freedesktop.org/wiki/Software/pkg-config/"
- url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz"
+ url = "http://pkgconfig.freedesktop.org/releases/pkg-config-0.28.tar.gz"
- version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d')
+ version('0.29.1', 'f739a28cae4e0ca291f82d1d41ef107d')
+ version('0.28', 'aa3c86e67551adc3ac865160e34a2a0d')
parallel = False
+ variant('internal_glib', default=True,
+ description='Builds with internal glib')
+
+ # The following patch is needed for gcc-6.1
+ patch('g_date_strftime.patch')
def install(self, spec, prefix):
- configure("--prefix=%s" %prefix,
- "--enable-shared",
- "--with-internal-glib") # There's a bootstrapping problem here;
- # glib uses pkg-config as well, so
- # break the cycle by using the internal
- # glib.
+ args = ["--prefix={0}".format(prefix),
+ "--enable-shared"]
+ if "+internal_glib" in spec:
+ # There's a bootstrapping problem here;
+ # glib uses pkg-config as well, so break
+ # the cycle by using the internal glib.
+ args.append("--with-internal-glib")
+ configure(*args)
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/plumed/package.py b/var/spack/repos/builtin/packages/plumed/package.py
new file mode 100644
index 0000000000..60dfdf7405
--- /dev/null
+++ b/var/spack/repos/builtin/packages/plumed/package.py
@@ -0,0 +1,158 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import subprocess
+
+from spack import *
+
+
+class Plumed(Package):
+ """PLUMED is an open source library for free energy calculations in
+ molecular systems which works together with some of the most popular
+ molecular dynamics engines.
+
+ Free energy calculations can be performed as a function of many order
+ parameters with a particular focus on biological problems, using state
+ of the art methods such as metadynamics, umbrella sampling and
+ Jarzynski-equation based steered MD.
+
+ The software, written in C++, can be easily interfaced with both fortran
+ and C/C++ codes.
+ """
+ homepage = 'http://www.plumed.org/'
+ url = 'https://github.com/plumed/plumed2/archive/v2.2.3.tar.gz'
+
+ version('2.2.3', 'a6e3863e40aac07eb8cf739cbd14ecf8')
+
+ # Variants. PLUMED by default builds a number of optional modules.
+ # The ones listed here are not built by default for various reasons,
+ # such as stability, lack of testing, or lack of demand.
+ variant('crystallization', default=False,
+ description='Build support for optional crystallization module.')
+ variant('imd', default=False,
+ description='Build support for optional imd module.')
+ variant('manyrestraints', default=False,
+ description='Build support for optional manyrestraints module.')
+ variant('shared', default=True, description='Builds shared libraries')
+ variant('mpi', default=True, description='Activates MPI support')
+ variant('gsl', default=True, description='Activates GSL support')
+
+ # Dependencies. LAPACK and BLAS are recommended but not essential.
+ depends_on('zlib')
+ depends_on('blas')
+ depends_on('lapack')
+
+ depends_on('mpi', when='+mpi')
+ depends_on('gsl', when='+gsl')
+
+ depends_on('autoconf', type='build')
+
+ # Dictionary mapping PLUMED versions to the patches it provides
+ # interactively
+ plumed_patches = {
+ '2.2.3': {
+ 'amber-14': '1',
+ 'gromacs-4.5.7': '2',
+ 'gromacs-4.6.7': '3',
+ 'gromacs-5.0.7': '4',
+ 'gromacs-5.1.2': '5',
+ 'lammps-6Apr13': '6',
+ 'namd-2.8': '7',
+ 'namd-2.9': '8',
+ 'espresso-5.0.2': '9'
+ }
+ }
+
+ def apply_patch(self, other):
+ plumed = subprocess.Popen(
+ [join_path(self.spec.prefix.bin, 'plumed'), 'patch', '-p'],
+ stdin=subprocess.PIPE
+ )
+ opts = Plumed.plumed_patches[str(self.version)]
+ search = '{0.name}-{0.version}'.format(other)
+ choice = opts[search] + '\n'
+ plumed.stdin.write(choice)
+ plumed.wait()
+
+ def setup_dependent_package(self, module, ext_spec):
+ # Make plumed visible from dependent packages
+ module.plumed = Executable(join_path(self.spec.prefix.bin, 'plumed'))
+
+ def install(self, spec, prefix):
+ # This part is needed to avoid linking with gsl cblas
+ # interface which will mask the cblas interface
+ # provided by optimized libraries due to linking order
+ filter_file('-lgslcblas', '', 'configure.ac')
+ autoreconf('-ivf')
+
+ # From the plumed docs:
+ # Also consider that this is different with respect to what some other
+ # configure script does in that variables such as MPICXX are
+ # completely ignored here. In case you work on a machine where CXX is
+ # set to a serial compiler and MPICXX to a MPI compiler, to compile
+ # with MPI you should use:
+ #
+ # > ./configure CXX="$MPICXX"
+ configure_opts = ['--prefix=' + prefix]
+
+ # If using MPI then ensure the correct compiler wrapper is used.
+ if '+mpi' in spec:
+ configure_opts.extend([
+ '--enable-mpi',
+ 'CXX={0}'.format(spec['mpi'].mpicxx)
+ ])
+
+ # If the MPI dependency is provided by the intel-mpi package then
+ # the following additional argument is required to allow it to
+ # build.
+ if spec.satisfies('^intel-mpi'):
+ configure_opts.extend([
+ 'STATIC_LIBS=-mt_mpi'
+ ])
+
+ # Additional arguments
+ configure_opts.extend([
+ '--enable-shared={0}'.format('yes' if '+shared' in spec else 'no'),
+ '--enable-gsl={0}'.format('yes' if '+gsl' in spec else 'no')
+ ])
+
+ # Construct list of optional modules
+ module_opts = []
+ module_opts.extend([
+ '+crystallization' if (
+ '+crystallization' in spec) else '-crystallization',
+ '+imd' if '+imd' in spec else '-imd',
+ '+manyrestraints' if (
+ '+manyrestraints' in spec) else '-manyrestraints'
+ ])
+
+ # If we have specified any optional modules then add the argument to
+ # enable or disable them.
+ if module_opts:
+ configure_opts.extend([
+ '--enable-modules={0}'.format("".join(module_opts))])
+
+ configure(*configure_opts)
+ make()
+ make('install')
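(Hedged sketch, not part of the diff; the dependent recipe is hypothetical.) setup_dependent_package() injects a `plumed` Executable into dependent packages, and apply_patch() drives PLUMED's interactive `plumed patch -p` menu by writing the numeric choice from plumed_patches to its stdin. A dependent MD code could therefore do something like:

    # inside a hypothetical gromacs@5.1.2 recipe that depends_on('plumed'):
    def install(self, spec, prefix):
        # let the plumed package pick the right menu entry ('5' for gromacs-5.1.2)
        spec['plumed'].package.apply_patch(self)
        # ... then build the patched sources as usual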
diff --git a/var/spack/repos/builtin/packages/pmgr_collective/package.py b/var/spack/repos/builtin/packages/pmgr-collective/package.py
index a6e3b8e2a2..f6466a7954 100644
--- a/var/spack/repos/builtin/packages/pmgr_collective/package.py
+++ b/var/spack/repos/builtin/packages/pmgr-collective/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class PmgrCollective(Package):
"""PMGR_COLLECTIVE provides a scalable network for bootstrapping
MPI jobs."""
diff --git a/var/spack/repos/builtin/packages/pngwriter/package.py b/var/spack/repos/builtin/packages/pngwriter/package.py
new file mode 100644
index 0000000000..4c0370a7ef
--- /dev/null
+++ b/var/spack/repos/builtin/packages/pngwriter/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Pngwriter(Package):
+ """PNGwriter is a very easy to use open source graphics library that uses
+ PNG as its output format. The interface has been designed to be as simple
+ and intuitive as possible. It supports plotting and reading pixels in the
+ RGB (red, green, blue), HSV (hue, saturation, value/brightness) and CMYK
+ (cyan, magenta, yellow, black) colour spaces, basic shapes, scaling,
+ bilinear interpolation, full TrueType antialiased and rotated text support,
+ bezier curves, opening existing PNG images and more.
+ """
+
+ homepage = "http://pngwriter.sourceforge.net/"
+ url = "https://github.com/pngwriter/pngwriter/archive/0.5.6.tar.gz"
+
+ version('dev', branch='dev',
+ git='https://github.com/pngwriter/pngwriter.git')
+ version('master', branch='master',
+ git='https://github.com/pngwriter/pngwriter.git')
+ version('0.5.6', 'c13bd1fdc0e331a246e6127b5f262136')
+
+ depends_on('cmake', type='build')
+ depends_on('libpng')
+ depends_on('zlib')
+ depends_on('freetype')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake('-DCMAKE_INSTALL_PREFIX=%s' % prefix,
+ '..', *std_cmake_args)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/polymake/package.py b/var/spack/repos/builtin/packages/polymake/package.py
new file mode 100644
index 0000000000..c0bb9082ae
--- /dev/null
+++ b/var/spack/repos/builtin/packages/polymake/package.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Polymake(Package):
+ """polymake is open source software for research in polyhedral geometry"""
+ homepage = "https://polymake.org/doku.php"
+ url = "https://polymake.org/lib/exe/fetch.php/download/polymake-3.0r1.tar.bz2"
+
+ version('3.0r2', '08584547589f052ea50e2148109202ab')
+ version('3.0r1', '63ecbecf9697c6826724d8a041d2cac0')
+
+ # Note: Could also be built with nauty instead of bliss
+
+ depends_on("bliss")
+ depends_on("boost")
+ depends_on("cddlib")
+ depends_on("gmp")
+ depends_on("lrslib")
+ depends_on("mpfr")
+ depends_on("ppl")
+
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix,
+ "--with-bliss=%s" % spec["bliss"].prefix,
+ "--with-boost=%s" % spec["boost"].prefix,
+ "--with-cdd=%s" % spec["cddlib"].prefix,
+ "--with-gmp=%s" % spec["gmp"].prefix,
+ "--with-lrs=%s" % spec["lrslib"].prefix,
+ "--with-mpfr=%s" % spec["mpfr"].prefix,
+ "--with-ppl=%s" % spec["ppl"].prefix)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/porta/Makefile.spack.patch b/var/spack/repos/builtin/packages/porta/Makefile.spack.patch
new file mode 100644
index 0000000000..1cd8fcc3c0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/porta/Makefile.spack.patch
@@ -0,0 +1,23 @@
+--- old/src/Makefile.spack
++++ new/src/Makefile.spack
+@@ -0,0 +1,20 @@
++# Set PREFIX to the install location for both building and installing
++
++all: valid xporta
++
++valid: common.lo arith.lo inout.lo log.lo valid.lo
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++
++xporta: common.lo arith.lo inout.lo log.lo \
++ porta.lo four_mot.lo portsort.lo largecalc.lo mp.lo
++ libtool --mode=link --tag=CC cc -g -O3 -o $@ $^
++
++%.lo: %.c
++ libtool --mode=compile --tag=CC cc -g -O3 -c $*.c
++
++install:
++ mkdir -p $(PREFIX)/bin
++ libtool --mode=install cp valid $(PREFIX)/bin/valid
++ libtool --mode=install cp xporta $(PREFIX)/bin/xporta
++
++.PHONY: all install
diff --git a/var/spack/repos/builtin/packages/porta/package.py b/var/spack/repos/builtin/packages/porta/package.py
new file mode 100644
index 0000000000..b620daf78f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/porta/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Porta(Package):
+ """PORTA is a collection of routines for analyzing polytopes and
+ polyhedra"""
+ homepage = "http://porta.zib.de"
+ url = "http://porta.zib.de/porta-1.4.1.tgz"
+
+ version('1.4.1', '585179bf19d214ed364663a5d17bd5fc')
+
+ depends_on("libtool", type="build")
+
+ patch("Makefile.spack.patch")
+
+ def install(self, spec, prefix):
+ with working_dir("src"):
+ make("-f", "Makefile.spack", "PREFIX=%s" % prefix)
+ make("-f", "Makefile.spack", "PREFIX=%s" % prefix, "install")
diff --git a/var/spack/repos/builtin/packages/postgresql/package.py b/var/spack/repos/builtin/packages/postgresql/package.py
index 9362234881..caf3768362 100644
--- a/var/spack/repos/builtin/packages/postgresql/package.py
+++ b/var/spack/repos/builtin/packages/postgresql/package.py
@@ -24,18 +24,21 @@
##############################################################################
from spack import *
+
class Postgresql(Package):
- """PostgreSQL is a powerful, open source object-relational
- database system. It has more than 15 years of active
- development and a proven architecture that has earned it a
- strong reputation for reliability, data integrity, and
- correctness."""
+ """PostgreSQL is a powerful, open source object-relational database system.
+ It has more than 15 years of active development and a proven architecture
+ that has earned it a strong reputation for reliability, data integrity, and
+ correctness."""
+
homepage = "http://www.postgresql.org/"
url = "http://ftp.postgresql.org/pub/source/v9.3.4/postgresql-9.3.4.tar.bz2"
version('9.3.4', 'd0a41f54c377b2d2fab4a003b0dac762')
+ version('9.5.3', '3f0c388566c688c82b01a0edf1e6b7a0')
- depends_on("openssl")
+ depends_on('openssl')
+ depends_on('readline')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
diff --git a/var/spack/repos/builtin/packages/ppl/package.py b/var/spack/repos/builtin/packages/ppl/package.py
index a54c6ce221..73404103f0 100644
--- a/var/spack/repos/builtin/packages/ppl/package.py
+++ b/var/spack/repos/builtin/packages/ppl/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Ppl(Package):
"""The Parma Polyhedra Library (PPL) provides numerical
abstractions especially targeted at applications in the field of
diff --git a/var/spack/repos/builtin/packages/prank/package.py b/var/spack/repos/builtin/packages/prank/package.py
new file mode 100644
index 0000000000..d627e8a0b6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/prank/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Prank(Package):
+ """A powerful multiple sequence alignment browser."""
+
+ homepage = "http://wasabiapp.org/software/prank/"
+ url = "http://wasabiapp.org/download/prank/prank.source.140603.tgz"
+
+ version('150803', '71ac2659e91c385c96473712c0a23e8a')
+
+ depends_on('mafft')
+ depends_on('exonerate')
+ depends_on('bpp-suite') # for bppancestor
+
+ def install(self, spec, prefix):
+ with working_dir('src'):
+ make()
+ mkdirp(prefix.bin)
+ install('prank', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/presentproto/package.py b/var/spack/repos/builtin/packages/presentproto/package.py
new file mode 100644
index 0000000000..ca145abb6b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/presentproto/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Presentproto(Package):
+ """Present protocol specification and Xlib/Xserver headers."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/presentproto/"
+ url = "https://www.x.org/archive/individual/proto/presentproto-1.0.tar.gz"
+
+ version('1.0', '57eaf4bb58e86476ec89cfb42d675961')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/printproto/package.py b/var/spack/repos/builtin/packages/printproto/package.py
new file mode 100644
index 0000000000..151924dd49
--- /dev/null
+++ b/var/spack/repos/builtin/packages/printproto/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Printproto(Package):
+ """Xprint extension to the X11 protocol - a portable, network-transparent
+ printing system."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/printproto"
+ url = "https://www.x.org/archive/individual/proto/printproto-1.0.5.tar.gz"
+
+ version('1.0.5', '5afeb3a7de8a14b417239a14ea724268')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/proj/package.py b/var/spack/repos/builtin/packages/proj/package.py
index 20dd4f5f69..06ab6108b6 100644
--- a/var/spack/repos/builtin/packages/proj/package.py
+++ b/var/spack/repos/builtin/packages/proj/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Proj(Package):
"""Cartographic Projections"""
homepage = "https://github.com/OSGeo/proj.4/wiki"
diff --git a/var/spack/repos/builtin/packages/protobuf/package.py b/var/spack/repos/builtin/packages/protobuf/package.py
index d4befc34ab..bf0073b16a 100644
--- a/var/spack/repos/builtin/packages/protobuf/package.py
+++ b/var/spack/repos/builtin/packages/protobuf/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Protobuf(Package):
"""Google's data interchange format."""
diff --git a/var/spack/repos/builtin/packages/proxymngr/package.py b/var/spack/repos/builtin/packages/proxymngr/package.py
new file mode 100644
index 0000000000..896f4a516b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/proxymngr/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Proxymngr(Package):
+ """The proxy manager (proxymngr) is responsible for resolving requests from
+ xfindproxy (and other similar clients), starting new proxies when
+ appropriate, and keeping track of all of the available proxy services.
+ The proxy manager strives to reuse existing proxies whenever possible."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/proxymngr"
+ url = "https://www.x.org/archive/individual/app/proxymngr-1.0.4.tar.gz"
+
+ version('1.0.4', 'a165cf704f6a413f0bacf65ea470331f')
+
+ depends_on('libice')
+ depends_on('libxt')
+ depends_on('lbxproxy')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('xproxymanagementprotocol', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/psi4/package.py b/var/spack/repos/builtin/packages/psi4/package.py
new file mode 100644
index 0000000000..4248c24646
--- /dev/null
+++ b/var/spack/repos/builtin/packages/psi4/package.py
@@ -0,0 +1,124 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Psi4(Package):
+ """Psi4 is an open-source suite of ab initio quantum chemistry
+ programs designed for efficient, high-accuracy simulations of
+ a variety of molecular properties."""
+
+ homepage = "http://www.psicode.org/"
+ url = "https://github.com/psi4/psi4/archive/0.5.tar.gz"
+
+ version('0.5', '53041b8a9be3958384171d0d22f9fdd0')
+
+ # Required dependencies
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('boost'
+ '+chrono'
+ '+filesystem'
+ '+python'
+ '+regex'
+ '+serialization'
+ '+system'
+ '+timer'
+ '+thread')
+ depends_on('python')
+ depends_on('cmake', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+
+ # Optional dependencies
+ # TODO: add packages for these
+ # depends_on('perl')
+ # depends_on('erd')
+ # depends_on('pcm-solver')
+ # depends_on('chemps2')
+
+ def install(self, spec, prefix):
+ cmake_args = [
+ '-DBLAS_TYPE={0}'.format(spec['blas'].name.upper()),
+ '-DBLAS_LIBRARIES={0}'.format(spec['blas'].blas_libs.joined()),
+ '-DLAPACK_TYPE={0}'.format(spec['lapack'].name.upper()),
+ '-DLAPACK_LIBRARIES={0}'.format(
+ spec['lapack'].lapack_libs.joined()),
+ '-DBOOST_INCLUDEDIR={0}'.format(spec['boost'].prefix.include),
+ '-DBOOST_LIBRARYDIR={0}'.format(spec['boost'].prefix.lib),
+ '-DENABLE_CHEMPS2=OFF'
+ ]
+
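+        # std_cmake_args carries Spack's standard CMake options (install prefix, build type, and related settings)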
+ cmake_args.extend(std_cmake_args)
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *cmake_args)
+
+ make()
+ make('install')
+
+ self.filter_compilers(spec, prefix)
+
+ def filter_compilers(self, spec, prefix):
+ """Run after install to tell the configuration files to
+ use the compilers that Spack built the package with.
+
+ If this isn't done, they'll have PLUGIN_CXX set to
+ Spack's generic cxx. We want it to be bound to
+ whatever compiler it was built with."""
+
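+        # filter_file options: treat the search strings literally (not as
+        # regexes), skip files that do not exist, and keep no backup copies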
+ kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
+
+ cc_files = ['bin/psi4-config']
+ cxx_files = ['bin/psi4-config', 'include/psi4/psiconfig.h']
+ template = 'share/psi4/plugin/Makefile.template'
+
+ for filename in cc_files:
+ filter_file(os.environ['CC'], self.compiler.cc,
+ os.path.join(prefix, filename), **kwargs)
+
+ for filename in cxx_files:
+ filter_file(os.environ['CXX'], self.compiler.cxx,
+ os.path.join(prefix, filename), **kwargs)
+
+ # The binary still keeps track of the compiler used to install Psi4
+ # and uses it when creating a plugin template
+ filter_file('@PLUGIN_CXX@', self.compiler.cxx,
+ os.path.join(prefix, template), **kwargs)
+
+ # The binary links to the build include directory instead of the
+ # installation include directory:
+ # https://github.com/psi4/psi4/issues/410
+ filter_file('@PLUGIN_INCLUDES@', '-I{0}'.format(
+ ' -I'.join([
+ os.path.join(spec['psi4'].prefix.include, 'psi4'),
+ os.path.join(spec['boost'].prefix.include, 'boost'),
+ os.path.join(spec['python'].prefix.include, 'python{0}'.format(
+ spec['python'].version.up_to(2))),
+ spec['lapack'].prefix.include,
+ spec['blas'].prefix.include,
+ '/usr/include'
+ ])
+ ), os.path.join(prefix, template), **kwargs)
diff --git a/var/spack/repos/builtin/packages/py-3to2/package.py b/var/spack/repos/builtin/packages/py-3to2/package.py
new file mode 100644
index 0000000000..80b95fcbfd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-3to2/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Py3to2(PythonPackage):
+ """lib3to2 is a set of fixers that are intended to backport code written
+ for Python version 3.x into Python version 2.x."""
+
+ homepage = "https://pypi.python.org/pypi/3to2"
+ url = "https://pypi.python.org/packages/source/3/3to2/3to2-1.1.1.zip"
+
+ version('1.1.1', 'cbeed28e350dbdaef86111ace3052824')
diff --git a/var/spack/repos/builtin/packages/py-alabaster/package.py b/var/spack/repos/builtin/packages/py-alabaster/package.py
new file mode 100644
index 0000000000..f2402c9bc6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-alabaster/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyAlabaster(PythonPackage):
+ """Alabaster is a visually (c)lean, responsive, configurable theme
+ for the Sphinx documentation system."""
+
+ homepage = "https://pypi.python.org/pypi/alabaster"
+ url = "https://pypi.python.org/packages/source/a/alabaster/alabaster-0.7.9.tar.gz"
+
+ version('0.7.9', 'b29646a8bbe7aa52830375b7d17b5d7a',
+ url="https://pypi.python.org/packages/71/c3/70da7d8ac18a4f4c502887bd2549e05745fa403e2cd9d06a8a9910a762bc/alabaster-0.7.9.tar.gz")
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-argcomplete/package.py b/var/spack/repos/builtin/packages/py-argcomplete/package.py
index ace9320424..585540f23b 100644
--- a/var/spack/repos/builtin/packages/py-argcomplete/package.py
+++ b/var/spack/repos/builtin/packages/py-argcomplete/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyArgcomplete(Package):
+
+class PyArgcomplete(PythonPackage):
"""Bash tab completion for argparse."""
homepage = "https://pypi.python.org/pypi/argcomplete"
@@ -32,7 +33,4 @@ class PyArgcomplete(Package):
version('1.1.1', '89a3839096c9f991ad33828e72d21abf')
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
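This conversion, like the similar ones below, drops the hand-written install() method because the new PythonPackage base class drives setup.py itself. A minimal sketch of the idea, with names and phases simplified rather than Spack's actual implementation:

    class PythonPackage(PackageBase):
        # Roughly what the removed install() methods used to do by hand:
        # build and install through setup.py with the Spack prefix.
        def build(self, spec, prefix):
            python('setup.py', 'build')

        def install(self, spec, prefix):
            python('setup.py', 'install', '--prefix={0}'.format(prefix))

Dependencies marked type='build' are only needed while setup.py runs; type=('build', 'run') keeps the dependency available at run time as well.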
diff --git a/var/spack/repos/builtin/packages/py-astroid/package.py b/var/spack/repos/builtin/packages/py-astroid/package.py
index 8ba6696b0c..f275813d86 100644
--- a/var/spack/repos/builtin/packages/py-astroid/package.py
+++ b/var/spack/repos/builtin/packages/py-astroid/package.py
@@ -22,11 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyAstroid(Package):
+class PyAstroid(PythonPackage):
homepage = "https://www.astroid.org/"
url = "https://github.com/PyCQA/astroid/archive/astroid-1.4.5.tar.gz"
@@ -36,11 +35,6 @@ class PyAstroid(Package):
version('1.4.2', '677f7965840f375af51b0e86403bee6a')
version('1.4.1', 'ed70bfed5e4b25be4292e7fe72da2c02')
- extends('python')
- depends_on('py-logilab-common')
- depends_on('py-setuptools')
- depends_on('py-six')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
+ depends_on('py-logilab-common', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-astropy/package.py b/var/spack/repos/builtin/packages/py-astropy/package.py
index 86875bbcae..8688e0ee7e 100644
--- a/var/spack/repos/builtin/packages/py-astropy/package.py
+++ b/var/spack/repos/builtin/packages/py-astropy/package.py
@@ -24,29 +24,35 @@
##############################################################################
from spack import *
-class PyAstropy(Package):
- """
- The Astropy Project is a community effort to develop a single core
+
+class PyAstropy(PythonPackage):
+ """The Astropy Project is a community effort to develop a single core
package for Astronomy in Python and foster interoperability between
- Python astronomy packages.
- """
+ Python astronomy packages."""
+
homepage = 'http://www.astropy.org/'
+ url = 'https://pypi.python.org/packages/source/a/astropy/astropy-1.1.2.tar.gz'
+ version('1.1.2', 'cbe32023b5b1177d1e2498a0d00cda51')
version('1.1.post1', 'b52919f657a37d45cc45f5cb0f58c44d')
- def url_for_version(self, version):
- return 'https://pypi.python.org/packages/source/a/astropy/astropy-{0}.tar.gz'.format(version)
+ # Required dependencies
+ depends_on('py-numpy', type=('build', 'run'))
- extends('python')
+ # Optional dependencies
+ depends_on('py-h5py', type=('build', 'run'))
+ depends_on('py-beautifulsoup4', type=('build', 'run'))
+ depends_on('py-pyyaml', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('libxml2')
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-pytz', type=('build', 'run'))
+ depends_on('py-scikit-image', type=('build', 'run'))
+ depends_on('py-pandas', type=('build', 'run'))
+ # System dependencies
depends_on('cfitsio')
depends_on('expat')
- depends_on('py-h5py')
- depends_on('py-numpy')
- depends_on('py-scipy')
-
- def install(self, spec, prefix):
- python('setup.py', 'build', '--use-system-cfitsio',
- '--use-system-expat')
- python('setup.py', 'install', '--prefix=' + prefix)
+ def build_args(self, spec, prefix):
+ return ['--use-system-cfitsio', '--use-system-expat']
diff --git a/var/spack/repos/builtin/packages/py-autopep8/package.py b/var/spack/repos/builtin/packages/py-autopep8/package.py
new file mode 100644
index 0000000000..c892e2979c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-autopep8/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyAutopep8(PythonPackage):
+ """autopep8 automatically formats Python code to conform to the
+ PEP 8 style guide."""
+
+ homepage = "https://github.com/hhatto/autopep8"
+ url = "https://github.com/hhatto/autopep8/archive/v1.2.4.tar.gz"
+
+ version('1.2.4', '0458db85159a9e1b45f3e71ce6c158da')
+ version('1.2.2', 'def3d023fc9dfd1b7113602e965ad8e1')
+
+ extends('python', ignore='bin/pep8')
+ depends_on('python@2.6:2.7,3.2:')
+
+ depends_on('py-pycodestyle@1.5.7:1.7.0', type=('build', 'run'))
+
+ depends_on('py-setuptools', type='build')
+
+ def url_for_version(self, version):
+ url = "https://github.com/hhatto/autopep8/archive/{0}{1}.tar.gz"
+ if version >= Version('1.2.3'):
+ return url.format('v', version)
+ else:
+ return url.format('ver', version)
diff --git a/var/spack/repos/builtin/packages/py-babel/package.py b/var/spack/repos/builtin/packages/py-babel/package.py
new file mode 100644
index 0000000000..844ceab34e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-babel/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBabel(PythonPackage):
+ """Babel is an integrated collection of utilities that assist in
+ internationalizing and localizing Python applications, with an
+ emphasis on web-based applications."""
+
+ homepage = "http://babel.pocoo.org/en/latest/"
+ url = "https://pypi.python.org/packages/source/B/Babel/Babel-2.3.4.tar.gz"
+
+ version('2.3.4', 'afa20bc55b0e991833030129ad498f35',
+ url="https://pypi.python.org/packages/6e/96/ba2a2462ed25ca0e651fb7b66e7080f5315f91425a07ea5b34d7c870c114/Babel-2.3.4.tar.gz")
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pytz', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-backports-abc/package.py b/var/spack/repos/builtin/packages/py-backports-abc/package.py
new file mode 100644
index 0000000000..7d062bff6a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-backports-abc/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBackportsAbc(PythonPackage):
+ """Backports_ABC: A backport of recent additions to the 'collections.abc'
+ module."""
+ homepage = "https://github.com/cython/backports_abc"
+ url = "https://github.com/cython/backports_abc/archive/0.4.tar.gz"
+
+ version('0.4', 'e4246ae689221c9cbe84369fdb59e8c74d02b298')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-backports-shutil-get-terminal-size/package.py b/var/spack/repos/builtin/packages/py-backports-shutil-get-terminal-size/package.py
new file mode 100644
index 0000000000..5950faa765
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-backports-shutil-get-terminal-size/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBackportsShutilGetTerminalSize(PythonPackage):
+ """A backport of the get_terminal_size function
+ from Python 3.3's shutil."""
+
+ homepage = "https://pypi.python.org/pypi/backports.shutil_get_terminal_size"
+ url = "https://pypi.io/packages/source/b/backports.shutil_get_terminal_size/backports.shutil_get_terminal_size-1.0.0.tar.gz"
+
+ version('1.0.0', '03267762480bd86b50580dc19dff3c66')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@:3.2.999')
diff --git a/var/spack/repos/builtin/packages/py-backports-ssl-match-hostname/package.py b/var/spack/repos/builtin/packages/py-backports-ssl-match-hostname/package.py
new file mode 100644
index 0000000000..bf4679556b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-backports-ssl-match-hostname/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBackportsSslMatchHostname(PythonPackage):
+ """The ssl.match_hostname() function from Python 3.5"""
+
+ homepage = "https://pypi.python.org/pypi/backports.ssl_match_hostname"
+ url = "https://pypi.io/packages/source/b/backports.ssl_match_hostname/backports.ssl_match_hostname-3.5.0.1.tar.gz"
+
+ version('3.5.0.1', 'c03fc5e2c7b3da46b81acf5cbacfe1e6')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-basemap/package.py b/var/spack/repos/builtin/packages/py-basemap/package.py
index 95d02bce91..4a35134e40 100644
--- a/var/spack/repos/builtin/packages/py-basemap/package.py
+++ b/var/spack/repos/builtin/packages/py-basemap/package.py
@@ -25,20 +25,97 @@
from spack import *
import os
-class PyBasemap(Package):
- """The matplotlib basemap toolkit is a library for plotting 2D data on maps in Python."""
+
+class PyBasemap(PythonPackage):
+ """The matplotlib basemap toolkit is a library for plotting
+ 2D data on maps in Python."""
+
homepage = "http://matplotlib.org/basemap/"
url = "https://downloads.sourceforge.net/project/matplotlib/matplotlib-toolkits/basemap-1.0.7/basemap-1.0.7.tar.gz"
version('1.0.7', '48c0557ced9e2c6e440b28b3caff2de8')
- extends('python')
- depends_on('py-setuptools')
- depends_on('py-numpy')
- depends_on('py-matplotlib+gui')
- depends_on('py-pillow')
- depends_on("geos")
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('pil', type=('build', 'run'))
+ depends_on('geos')
+
+ def setup_environment(self, spack_env, run_env):
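+        # basemap's setup.py locates the GEOS library through the GEOS_DIR environment variable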
+ spack_env.set('GEOS_DIR', self.spec['geos'].prefix)
+
+ @PythonPackage.sanity_check('install')
+ def post_install_patch(self):
+ spec = self.spec
+ # We are not sure if this fix is needed before Python 3.5.2.
+ # If it is needed, this test should be changed.
+ # See: https://github.com/LLNL/spack/pull/1964
+ if spec['python'].version >= Version('3.5.2'):
+ # Use symlinks to join the two mpl_toolkits/ directories into
+ # one, inside of basemap. This is because Basemap tries to
+ # "add to" an existing package in Matplotlib, which is only
+ # legal Python for "Implicit Namespace Packages":
+ # https://www.python.org/dev/peps/pep-0420/
+ # https://github.com/Homebrew/homebrew-python/issues/112
+ # In practice, Python will see only the basemap version of
+ # mpl_toolkits
+ path_m = find_package_dir(
+ spec['py-matplotlib'].prefix, 'mpl_toolkits')
+ path_b = find_package_dir(spec.prefix, 'mpl_toolkits')
+ link_dir(path_m, path_b)
+
+
+def find_package_dir(spack_package_root, name):
+    """Find a directory with a specific name somewhere inside a Spack
+    package.
+
+    spack_package_root:
+        Root directory to start searching
+    name:
+        Name of the package directory to look for (not fully qualified,
+        just the leaf)
+
+    Returns the first matching directory, or None if none is found.
+    """
+ for root, dirs, files in os.walk(spack_package_root):
+ path = os.path.join(root, name)
+
+ # Make sure it's a directory
+ if not os.path.isdir(path):
+ continue
+
+ # Make sure it's really a package
+ if not os.path.exists(os.path.join(path, '__init__.py')):
+ continue
+
+ return path
+
+ return None
+
+
+def link_dir(src_root, dest_root, link=os.symlink):
+ """Link all files in src_root into directory dest_root"""
+
+ for src_path, dirnames, filenames in os.walk(src_root):
+ if not filenames:
+ continue # avoid explicitly making empty dirs
+
+ # Avoid internal Python stuff
+ src_leaf = os.path.split(src_path)[1]
+ if src_leaf.startswith('__'):
+ continue
+
+ # Make sure the destination directory exists
+ dest_path = os.path.join(dest_root, src_path[len(src_root) + 1:])
+        try:
+            os.makedirs(dest_path)
+        except OSError:
+            # The destination directory may already exist
+            pass
- def install(self, spec, prefix):
- env['GEOS_DIR'] = spec['geos'].prefix
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ # Link all files from src to dest directory
+ for fname in filenames:
+ src = os.path.join(src_path, fname)
+ dst = os.path.join(dest_path, fname)
+ if not os.path.exists(dst):
+ link(src, dst)
diff --git a/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py b/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py
new file mode 100644
index 0000000000..3a90d02127
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-beautifulsoup4/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBeautifulsoup4(PythonPackage):
+ """Beautiful Soup is a Python library for pulling data out of HTML and
+ XML files. It works with your favorite parser to provide idiomatic ways
+ of navigating, searching, and modifying the parse tree."""
+
+ homepage = "https://www.crummy.com/software/BeautifulSoup"
+ url = "https://pypi.python.org/packages/source/b/beautifulsoup4/beautifulsoup4-4.4.1.tar.gz"
+
+ version('4.5.1', '994abd90e691beaf7d42c00ffb2f3a67',
+ url='https://www.crummy.com/software/BeautifulSoup/bs4/'
+ 'download/4.5/beautifulsoup4-4.5.1.tar.gz')
+ version('4.4.1', '8fbd9a7cac0704645fa20d1419036815')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-biopython/package.py b/var/spack/repos/builtin/packages/py-biopython/package.py
index 26f42e060b..3411e244f9 100644
--- a/var/spack/repos/builtin/packages/py-biopython/package.py
+++ b/var/spack/repos/builtin/packages/py-biopython/package.py
@@ -24,16 +24,17 @@
##############################################################################
from spack import *
-class PyBiopython(Package):
- """It is a distributed collaborative effort to develop Python libraries and applications which address the needs of current and future work in bioinformatics."""
+
+class PyBiopython(PythonPackage):
+ """A distributed collaborative effort to develop Python libraries and
+ applications which address the needs of current and future work in
+    bioinformatics."""
homepage = "http://biopython.org/wiki/Main_Page"
url = "http://biopython.org/DIST/biopython-1.65.tar.gz"
version('1.65', '143e7861ade85c0a8b5e2bbdd1da1f67')
- extends('python')
- depends_on('py-mx')
- depends_on('py-numpy')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-mx', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-bleach/package.py b/var/spack/repos/builtin/packages/py-bleach/package.py
new file mode 100644
index 0000000000..4a6d7ed9d2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-bleach/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyBleach(PythonPackage):
+ """An easy whitelist-based HTML-sanitizing tool."""
+
+ homepage = "http://github.com/mozilla/bleach"
+ url = "https://pypi.python.org/packages/99/00/25a8fce4de102bf6e3cc76bc4ea60685b2fee33bde1b34830c70cacc26a7/bleach-1.5.0.tar.gz"
+
+ version('1.5.0', 'b663300efdf421b3b727b19d7be9c7e7')
+
+ depends_on('python@2.6:2.7,3.2:3.5')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-html5lib@0.999,0.999999:0.9999999', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-blessings/package.py b/var/spack/repos/builtin/packages/py-blessings/package.py
index 05a1f5bcbb..b38f34b412 100644
--- a/var/spack/repos/builtin/packages/py-blessings/package.py
+++ b/var/spack/repos/builtin/packages/py-blessings/package.py
@@ -24,16 +24,12 @@
##############################################################################
from spack import *
-class PyBlessings(Package):
+
+class PyBlessings(PythonPackage):
"""A nicer, kinder way to write to the terminal """
homepage = "https://github.com/erikrose/blessings"
url = "https://pypi.python.org/packages/source/b/blessings/blessings-1.6.tar.gz"
version('1.6', '4f552a8ebcd4982693c92571beb99394')
- depends_on('py-setuptools')
-
- extends("python")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-bottleneck/package.py b/var/spack/repos/builtin/packages/py-bottleneck/package.py
index 1a186b219c..a1215ce39e 100644
--- a/var/spack/repos/builtin/packages/py-bottleneck/package.py
+++ b/var/spack/repos/builtin/packages/py-bottleneck/package.py
@@ -24,15 +24,12 @@
##############################################################################
from spack import *
-class PyBottleneck(Package):
- """Bottleneck is a collection of fast NumPy array functions written in Cython."""
+
+class PyBottleneck(PythonPackage):
+ """A collection of fast NumPy array functions written in Cython."""
homepage = "https://pypi.python.org/pypi/Bottleneck/1.0.0"
url = "https://pypi.python.org/packages/source/B/Bottleneck/Bottleneck-1.0.0.tar.gz"
version('1.0.0', '380fa6f275bd24f27e7cf0e0d752f5d2')
- extends('python')
- depends_on('py-numpy')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-numpy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-cclib/package.py b/var/spack/repos/builtin/packages/py-cclib/package.py
new file mode 100644
index 0000000000..b59376d7b8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-cclib/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyCclib(PythonPackage):
+ """Open source library for parsing and interpreting the results of
+ computational chemistry packages"""
+
+ homepage = "https://cclib.github.io/"
+
+ version('1.5.post1', '1a50be48e4597b1a6dabe943da82a43c',
+ url="https://github.com/cclib/cclib/releases/download/v1.5/cclib-1.5.post1.tar.gz")
+
+ depends_on('py-numpy@1.5:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-cdo/package.py b/var/spack/repos/builtin/packages/py-cdo/package.py
new file mode 100644
index 0000000000..5eb8f414a8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-cdo/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyCdo(PythonPackage):
+ """The cdo package provides an interface to the Climate Data
+ Operators from Python."""
+
+ homepage = "https://pypi.python.org/pypi/cdo"
+ url = "https://pypi.python.org/packages/sources/c/cdo/cdo-1.3.2.tar.gz"
+
+ version('1.3.2', '4b3686ec1b9b891f166c1c466c6db745',
+ url="https://pypi.python.org/packages/d6/13/908e7c1451e1f5fb68405f341cdcb3196a16952ebfe1f172cb788f864aa9/cdo-1.3.2.tar.gz")
+
+ depends_on('cdo')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-netcdf', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-certifi/package.py b/var/spack/repos/builtin/packages/py-certifi/package.py
new file mode 100644
index 0000000000..959c0221ed
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-certifi/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyCertifi(PythonPackage):
+ """Certifi: A carefully curated collection of Root Certificates for validating
+ the trustworthiness of SSL certificates while verifying the identity of TLS
+ hosts."""
+ homepage = "https://github.com/certifi/python-certifi"
+ url = "https://github.com/certifi/python-certifi/archive/2016.02.28.tar.gz"
+
+ version('2016.02.28', '5ccfc23bd5e931863f0b01ef3e9d2dbd3bef0e1b')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-cffi/package.py b/var/spack/repos/builtin/packages/py-cffi/package.py
index 58d0a81990..c0fbae639b 100644
--- a/var/spack/repos/builtin/packages/py-cffi/package.py
+++ b/var/spack/repos/builtin/packages/py-cffi/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyCffi(Package):
+
+class PyCffi(PythonPackage):
"""Foreign Function Interface for Python calling C code"""
homepage = "http://cffi.readthedocs.org/en/latest/"
# base https://pypi.python.org/pypi/cffi
@@ -32,10 +33,15 @@ class PyCffi(Package):
version('1.1.2', 'ca6e6c45b45caa87aee9adc7c796eaea')
- extends('python')
- depends_on('py-setuptools')
- depends_on('py-pycparser')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pycparser', type=('build', 'run'))
depends_on('libffi')
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ def setup_environment(self, spack_env, run_env):
+ # This sets the compiler (and flags) that distutils will use
+ # to create the final shared library. It will use the
+ # compiler specified by the environment variable 'CC' for all
+ # other compilation. We are setting the 'LDSHARED" to the
+ # spack compiler wrapper plus a few extra flags necessary for
+ # building the shared library.
+ spack_env.set('LDSHARED', "{0} -shared -pthread".format(spack_cc))
diff --git a/var/spack/repos/builtin/packages/py-configparser/package.py b/var/spack/repos/builtin/packages/py-configparser/package.py
new file mode 100644
index 0000000000..c9ba7ac15c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-configparser/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyConfigparser(PythonPackage):
+ """This library brings the updated configparser from Python 3.5 to
+ Python 2.6-3.5."""
+
+ homepage = "https://pypi.python.org/pypi/configparser"
+ url = "https://pypi.python.org/packages/source/c/configparser/configparser-3.5.0.tar.gz"
+
+ version('3.5.0', 'cfdd915a5b7a6c09917a64a573140538',
+ url="https://pypi.python.org/packages/7c/69/c2ce7e91c89dc073eb1aa74c0621c3eefbffe8216b3f9af9d3885265c01c/configparser-3.5.0.tar.gz")
+
+ depends_on('python@2.6:2.7,3.4:')
+
+ # This dependency breaks concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-ordereddict', when='^python@2.6:2.6.999', type=('build', 'run')) # noqa
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-coverage/package.py b/var/spack/repos/builtin/packages/py-coverage/package.py
index 92a5bd0088..dc8fc08e24 100644
--- a/var/spack/repos/builtin/packages/py-coverage/package.py
+++ b/var/spack/repos/builtin/packages/py-coverage/package.py
@@ -24,17 +24,13 @@
##############################################################################
from spack import *
-class PyCoverage(Package):
+
+class PyCoverage(PythonPackage):
""" Testing coverage checker for python """
- # FIXME: add a proper url for your package's homepage here.
+
homepage = "http://nedbatchelder.com/code/coverage/"
url = "https://pypi.python.org/packages/source/c/coverage/coverage-4.0a6.tar.gz"
version('4.0a6', '1bb4058062646148965bef0796b61efc')
- depends_on('py-setuptools')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-csvkit/package.py b/var/spack/repos/builtin/packages/py-csvkit/package.py
index 7fe2f953f7..5bcda9f449 100644
--- a/var/spack/repos/builtin/packages/py-csvkit/package.py
+++ b/var/spack/repos/builtin/packages/py-csvkit/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyCsvkit(Package):
+
+class PyCsvkit(PythonPackage):
"""A library of utilities for working with CSV, the king of tabular file
formats"""
@@ -33,14 +34,9 @@ class PyCsvkit(Package):
version('0.9.1', '48d78920019d18846933ee969502fff6')
- extends('python')
-
- depends_on('py-dateutil')
- depends_on('py-dbf')
- depends_on('py-xlrd')
- depends_on('py-SQLAlchemy')
- depends_on('py-six')
- depends_on('py-openpyxl')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-dateutil', type=('build', 'run'))
+ depends_on('py-dbf', type=('build', 'run'))
+ depends_on('py-xlrd', type=('build', 'run'))
+ depends_on('py-sqlalchemy', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-openpyxl', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-cycler/package.py b/var/spack/repos/builtin/packages/py-cycler/package.py
new file mode 100644
index 0000000000..f2b2a15018
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-cycler/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyCycler(PythonPackage):
+ """Composable style cycles."""
+
+ homepage = "http://matplotlib.org/cycler/"
+ url = "https://github.com/matplotlib/cycler/archive/v0.10.0.tar.gz"
+
+ version('0.10.0', '83dd0df7810e838b59e4dd9fa6e2d198')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-cython/package.py b/var/spack/repos/builtin/packages/py-cython/package.py
index 5ccc508697..c84728cf3e 100644
--- a/var/spack/repos/builtin/packages/py-cython/package.py
+++ b/var/spack/repos/builtin/packages/py-cython/package.py
@@ -24,10 +24,13 @@
##############################################################################
from spack import *
-class PyCython(Package):
+
+class PyCython(PythonPackage):
"""The Cython compiler for writing C extensions for the Python language."""
homepage = "https://pypi.python.org/pypi/cython"
- url = "https://pypi.python.org/packages/source/C/Cython/Cython-0.22.tar.gz"
+ url = "https://pypi.io/packages/source/c/cython/Cython-0.25.2.tar.gz"
+
+ version('0.25.2', '642c81285e1bb833b14ab3f439964086')
version('0.23.5', '66b62989a67c55af016c916da36e7514')
version('0.23.4', '157df1f69bcec6b56fd97e0f2e057f6e')
@@ -35,8 +38,3 @@ class PyCython(Package):
# These versions contain illegal Python3 code...
version('0.22', '1ae25add4ef7b63ee9b4af697300d6b6')
version('0.21.2', 'd21adb870c75680dc857cd05d41046a4')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-dask/package.py b/var/spack/repos/builtin/packages/py-dask/package.py
index 831f86e81c..4113c2ac0b 100644
--- a/var/spack/repos/builtin/packages/py-dask/package.py
+++ b/var/spack/repos/builtin/packages/py-dask/package.py
@@ -24,16 +24,12 @@
##############################################################################
from spack import *
-class PyDask(Package):
+
+class PyDask(PythonPackage):
"""Minimal task scheduling abstraction"""
homepage = "https://github.com/dask/dask/"
url = "https://pypi.python.org/packages/source/d/dask/dask-0.8.1.tar.gz"
version('0.8.1', '5dd8e3a3823b3bc62c9a6d192e2cb5b4')
- extends('python')
-
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-dateutil/package.py b/var/spack/repos/builtin/packages/py-dateutil/package.py
index eb8eb09e75..3ab5ad029c 100644
--- a/var/spack/repos/builtin/packages/py-dateutil/package.py
+++ b/var/spack/repos/builtin/packages/py-dateutil/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyDateutil(Package):
+
+class PyDateutil(PythonPackage):
"""Extensions to the standard Python datetime module."""
homepage = "https://pypi.python.org/pypi/dateutil"
url = "https://pypi.python.org/packages/source/p/python-dateutil/python-dateutil-2.4.0.tar.gz"
@@ -33,9 +34,5 @@ class PyDateutil(Package):
version('2.4.2', '4ef68e1c485b09e9f034e10473e5add2')
version('2.5.2', 'eafe168e8f404bf384514f5116eedbb6')
- extends('python')
- depends_on('py-setuptools')
- depends_on('py-six')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-dbf/package.py b/var/spack/repos/builtin/packages/py-dbf/package.py
index 09c93de428..56403405e8 100644
--- a/var/spack/repos/builtin/packages/py-dbf/package.py
+++ b/var/spack/repos/builtin/packages/py-dbf/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyDbf(Package):
+
+class PyDbf(PythonPackage):
"""Pure python package for reading/writing dBase, FoxPro, and Visual FoxPro
.dbf files (including memos)"""
@@ -32,8 +33,3 @@ class PyDbf(Package):
url = "https://pypi.python.org/packages/source/d/dbf/dbf-0.96.005.tar.gz"
version('0.96.005', 'bce1a1ed8b454a30606e7e18dd2f8277')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-decorator/package.py b/var/spack/repos/builtin/packages/py-decorator/package.py
index 90a95cd0cb..e5734866ec 100644
--- a/var/spack/repos/builtin/packages/py-decorator/package.py
+++ b/var/spack/repos/builtin/packages/py-decorator/package.py
@@ -24,16 +24,14 @@
##############################################################################
from spack import *
-class PyDecorator(Package):
- """The aim of the decorator module it to simplify the usage of decorators for the average programmer, and to popularize decorators by showing various non-trivial examples."""
+
+class PyDecorator(PythonPackage):
+ """The aim of the decorator module it to simplify the usage of decorators
+ for the average programmer, and to popularize decorators by showing
+ various non-trivial examples."""
homepage = "https://github.com/micheles/decorator"
url = "https://pypi.python.org/packages/source/d/decorator/decorator-4.0.9.tar.gz"
version('4.0.9', 'f12c5651ccd707e12a0abaa4f76cd69a')
- extends('python')
-
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-docutils/package.py b/var/spack/repos/builtin/packages/py-docutils/package.py
new file mode 100644
index 0000000000..00741284df
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-docutils/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyDocutils(PythonPackage):
+ """Docutils is an open-source text processing system for processing
+ plaintext documentation into useful formats, such as HTML, LaTeX,
+ man-pages, open-document or XML. It includes reStructuredText, the
+ easy to read, easy to use, what-you-see-is-what-you-get plaintext
+ markup language."""
+
+ homepage = "http://docutils.sourceforge.net/"
+ url = "https://pypi.python.org/packages/source/d/docutils/docutils-0.12.tar.gz"
+
+ version('0.13.1', 'ea4a893c633c788be9b8078b6b305d53',
+ url="https://pypi.python.org/packages/05/25/7b5484aca5d46915493f1fd4ecb63c38c333bd32aa9ad6e19da8d08895ae/docutils-0.13.1.tar.gz")
+ version('0.12', '4622263b62c5c771c03502afa3157768')
diff --git a/var/spack/repos/builtin/packages/py-emcee/package.py b/var/spack/repos/builtin/packages/py-emcee/package.py
new file mode 100644
index 0000000000..6419a9c40e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-emcee/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyEmcee(PythonPackage):
+ """emcee is an MIT licensed pure-Python implementation of Goodman & Weare's
+ Affine Invariant Markov chain Monte Carlo (MCMC) Ensemble sampler."""
+
+ homepage = "http://dan.iel.fm/emcee/current/"
+ url = "https://pypi.python.org/packages/source/e/emcee/emcee-2.1.0.tar.gz"
+
+ version('2.1.0', 'c6b6fad05c824d40671d4a4fc58dfff7')
+
+ depends_on('py-numpy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-entrypoints/package.py b/var/spack/repos/builtin/packages/py-entrypoints/package.py
new file mode 100644
index 0000000000..944999d69b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-entrypoints/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyEntrypoints(Package):
+ """Discover and load entry points from installed packages."""
+
+ homepage = "https://pypi.python.org/pypi/entrypoints"
+ url = "https://files.pythonhosted.org/packages/f8/ad/0e77a853c745a15981ab51fa9a0cb4eca7a7a007b4c1970106ee6ba01e0c/entrypoints-0.2.2-py2.py3-none-any.whl"
+
+ version('0.2.2', '73bd7ce92c19b25dc5a20aff41be996a', expand=False)
+
+ depends_on('py-pip', type='build')
+
+ def install(self, spec, prefix):
+ pip = which('pip')
+ pip('install', self.stage.archive_file, '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-enum34/package.py b/var/spack/repos/builtin/packages/py-enum34/package.py
new file mode 100644
index 0000000000..cc111ce092
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-enum34/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyEnum34(PythonPackage):
+ """Python 3.4 Enum backported to 3.3, 3.2, 3.1, 2.7, 2.6, 2.5, and 2.4."""
+
+ homepage = "https://pypi.python.org/pypi/enum34"
+ url = "https://pypi.python.org/packages/source/e/enum34/enum34-1.1.6.tar.gz"
+
+ version('1.1.6', '5f13a0841a61f7fc295c514490d120d0',
+ url="https://pypi.python.org/packages/bf/3e/31d502c25302814a7c2f1d3959d2a3b3f78e509002ba91aea64993936876/enum34-1.1.6.tar.gz")
+
+ depends_on('python@2.4:2.8,3.3:')
+
+ # This dependency breaks concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-ordereddict', when='^python@:2.6.999', type=('build', 'run')) # noqa
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-epydoc/package.py b/var/spack/repos/builtin/packages/py-epydoc/package.py
index b370075e02..e13d431f91 100644
--- a/var/spack/repos/builtin/packages/py-epydoc/package.py
+++ b/var/spack/repos/builtin/packages/py-epydoc/package.py
@@ -24,14 +24,11 @@
##############################################################################
from spack import *
-class PyEpydoc(Package):
- """Epydoc is a tool for generating API documentation documentation for Python modules, based on their docstrings."""
+
+class PyEpydoc(PythonPackage):
+ """Epydoc is a tool for generating API documentation documentation for
+ Python modules, based on their docstrings."""
homepage = "https://pypi.python.org/pypi/epydoc"
url = "https://pypi.python.org/packages/source/e/epydoc/epydoc-3.0.1.tar.gz"
version('3.0.1', '36407974bd5da2af00bf90ca27feeb44')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-flake8/package.py b/var/spack/repos/builtin/packages/py-flake8/package.py
index b261467d84..17d7cea343 100644
--- a/var/spack/repos/builtin/packages/py-flake8/package.py
+++ b/var/spack/repos/builtin/packages/py-flake8/package.py
@@ -25,16 +25,44 @@
from spack import *
-class PyFlake8(Package):
+class PyFlake8(PythonPackage):
"""Flake8 is a wrapper around PyFlakes, pep8 and Ned Batchelder's
McCabe script."""
- homepage = "http://flake8.readthedocs.io/en/latest/"
- url = "https://pypi.python.org/packages/source/f/flake8/flake8-2.5.4.tar.gz"
- version('2.5.4', 'a4585b3569b95c3f66acb8294a7f06ef')
+ homepage = "https://github.com/PyCQA/flake8"
+ url = "https://github.com/PyCQA/flake8/archive/3.0.4.tar.gz"
- extends('python')
- depends_on('py-setuptools')
+ version('3.0.4', 'cf2a7d8c92070f7b62253404ffb54df7')
+ version('2.5.4', '366dd1de6c300254c830b81e66979f06')
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ extends('python', ignore='bin/(pyflakes|pycodestyle)')
+ depends_on('python@2.7:2.8,3.4:')
+
+ # Most Python packages only require py-setuptools as a build dependency.
+ # However, py-flake8 requires py-setuptools during runtime as well.
+ depends_on('py-setuptools', type=('build', 'run'))
+
+ # pyflakes >= 0.8.1, != 1.2.0, != 1.2.1, != 1.2.2, < 1.3.0
+ depends_on('py-pyflakes@0.8.1:1.1.0,1.2.3:1.2.3', when='@3.0.4', type=('build', 'run'))
+ # pyflakes >= 0.8.1, < 1.1
+ depends_on('py-pyflakes@0.8.1:1.0.0', when='@2.5.4', type=('build', 'run'))
+
+ # pycodestyle >= 2.0.0, < 2.1.0
+ depends_on('py-pycodestyle@2.0.0:2.0.999', when='@3.0.4', type=('build', 'run'))
+ # pep8 >= 1.5.7, != 1.6.0, != 1.6.1, != 1.6.2
+ depends_on('py-pycodestyle@1.5.7,1.7.0:', when='@2.5.4', type=('build', 'run'))
+
+ # mccabe >= 0.5.0, < 0.6.0
+ depends_on('py-mccabe@0.5.0:0.5.999', when='@3.0.4', type=('build', 'run'))
+ # mccabe >= 0.2.1, < 0.5
+ depends_on('py-mccabe@0.2.1:0.4.0', when='@2.5.4', type=('build', 'run'))
+
+ # These dependencies break concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-configparser', when='^python@:3.3.999', type=('build', 'run')) # noqa
+ # depends_on('py-enum34', when='^python@:3.1.999', type=('build', 'run'))
+ depends_on('py-configparser', type=('build', 'run'))
+ depends_on('py-enum34', type=('build', 'run'))
+
+ # TODO: Add test dependencies
+ # depends_on('py-nose', type='test')
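
The pip-style requirements quoted in the comments above translate into Spack's version ranges, which are inclusive on both ends: an exclusive upper bound such as "< 2.1.0" becomes ":2.0.999", and excluded point releases are expressed as a comma-separated union of ranges (the exact bounds in the pyflakes line appear to be pinned to releases that existed at the time rather than derived mechanically). A couple of the mappings restated side by side:

# pip:   pycodestyle >= 2.0.0, < 2.1.0   ->  depends_on('py-pycodestyle@2.0.0:2.0.999')
# pip:   mccabe      >= 0.5.0, < 0.6.0   ->  depends_on('py-mccabe@0.5.0:0.5.999')
# pip:   pyflakes    >= 0.8.1, < 1.3.0, != 1.2.0/.1/.2
#                                        ->  depends_on('py-pyflakes@0.8.1:1.1.0,1.2.3:1.2.3')
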
diff --git a/var/spack/repos/builtin/packages/py-funcsigs/package.py b/var/spack/repos/builtin/packages/py-funcsigs/package.py
index 9d987b284e..ea8b71f25f 100644
--- a/var/spack/repos/builtin/packages/py-funcsigs/package.py
+++ b/var/spack/repos/builtin/packages/py-funcsigs/package.py
@@ -23,21 +23,13 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
-class PyFuncsigs(Package):
+
+class PyFuncsigs(PythonPackage):
"""Python function signatures from PEP362 for Python 2.6, 2.7 and 3.2."""
homepage = "https://pypi.python.org/pypi/funcsigs"
url = "https://pypi.python.org/packages/source/f/funcsigs/funcsigs-0.4.tar.gz"
version('0.4', 'fb1d031f284233e09701f6db1281c2a5')
- extends('python')
-
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
-
-
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-functools32/package.py b/var/spack/repos/builtin/packages/py-functools32/package.py
new file mode 100644
index 0000000000..f2fb0df555
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-functools32/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyFunctools32(PythonPackage):
+ """Backport of the functools module from Python 3.2.3 for use on 2.7 and
+ PyPy."""
+
+ homepage = "https://github.com/MiCHiLU/python-functools32"
+ url = "https://pypi.python.org/packages/source/f/functools32/functools32-3.2.3-2.tar.gz"
+
+ version('3.2.3-2', '09f24ffd9af9f6cd0f63cb9f4e23d4b2')
diff --git a/var/spack/repos/builtin/packages/py-futures/package.py b/var/spack/repos/builtin/packages/py-futures/package.py
new file mode 100644
index 0000000000..c6c1d8134f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-futures/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyFutures(PythonPackage):
+ """Backport of the concurrent.futures package from Python 3.2"""
+
+ homepage = "https://pypi.python.org/pypi/futures"
+ url = "https://pypi.io/packages/source/f/futures/futures-3.0.5.tar.gz"
+
+ version('3.0.5', 'ced2c365e518242512d7a398b515ff95')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-genders/package.py b/var/spack/repos/builtin/packages/py-genders/package.py
index 44f6cb1ef4..2123f4eb3f 100644
--- a/var/spack/repos/builtin/packages/py-genders/package.py
+++ b/var/spack/repos/builtin/packages/py-genders/package.py
@@ -24,16 +24,22 @@
##############################################################################
from spack import *
+
class PyGenders(Package):
- """Genders is a static cluster configuration database used for cluster configuration management. It is used by a variety of tools and scripts for management of large clusters."""
+ """Genders is a static cluster configuration database used for cluster
+ configuration management. It is used by a variety of tools and scripts
+ for management of large clusters."""
homepage = "https://github.com/chaos/genders"
url = "https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz"
- version('1.22', '9ea59a024dcbddb85b0ed25ddca9bc8e', url='https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz')
+ version('1.22', '9ea59a024dcbddb85b0ed25ddca9bc8e',
+ url='https://github.com/chaos/genders/releases/download/genders-1-22-1/genders-1.22.tar.gz')
extends('python')
+ # FIXME: Missing a dependency on genders
+ # #include <genders.h>
+
def install(self, spec, prefix):
- configure("--prefix=%s" %prefix)
+ configure("--prefix=%s" % prefix)
make(parallel=False)
make("install")
-
diff --git a/var/spack/repos/builtin/packages/py-genshi/package.py b/var/spack/repos/builtin/packages/py-genshi/package.py
index cba479a88f..462dbfe802 100644
--- a/var/spack/repos/builtin/packages/py-genshi/package.py
+++ b/var/spack/repos/builtin/packages/py-genshi/package.py
@@ -22,11 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import version, extends, depends_on
-from spack import Package
+from spack import *
-class PyGenshi(Package):
+class PyGenshi(PythonPackage):
"""Python toolkit for generation of output for the web"""
homepage = "https://genshi.edgewall.org/"
url = "http://ftp.edgewall.com/pub/genshi/Genshi-0.7.tar.gz"
@@ -35,8 +34,4 @@ class PyGenshi(Package):
version('0.6.1', '372c368c8931110b0a521fa6091742d7')
version('0.6', '604e8b23b4697655d36a69c2d8ef7187')
- extends("python")
- depends_on("py-setuptools")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on("py-setuptools", type='build')
diff --git a/var/spack/repos/builtin/packages/py-gnuplot/package.py b/var/spack/repos/builtin/packages/py-gnuplot/package.py
index 3381c4a5ac..a23aa2585f 100644
--- a/var/spack/repos/builtin/packages/py-gnuplot/package.py
+++ b/var/spack/repos/builtin/packages/py-gnuplot/package.py
@@ -24,15 +24,13 @@
##############################################################################
from spack import *
-class PyGnuplot(Package):
- """Gnuplot.py is a Python package that allows you to create graphs from within Python using the gnuplot plotting program."""
+
+class PyGnuplot(PythonPackage):
+ """Gnuplot.py is a Python package that allows you to create graphs from
+ within Python using the gnuplot plotting program."""
homepage = "http://gnuplot-py.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/gnuplot-py/Gnuplot-py/1.8/gnuplot-py-1.8.tar.gz"
version('1.8', 'abd6f571e7aec68ae7db90a5217cd5b1')
- extends('python')
- depends_on('py-numpy')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-numpy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-h5py/package.py b/var/spack/repos/builtin/packages/py-h5py/package.py
index de72bac44e..666905e5c0 100644
--- a/var/spack/repos/builtin/packages/py-h5py/package.py
+++ b/var/spack/repos/builtin/packages/py-h5py/package.py
@@ -23,21 +23,41 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import re
-class PyH5py(Package):
- """The h5py package provides both a high- and low-level interface to the HDF5 library from Python."""
+
+class PyH5py(PythonPackage):
+ """The h5py package provides both a high- and low-level interface to the
+ HDF5 library from Python."""
+
homepage = "https://pypi.python.org/pypi/h5py"
url = "https://pypi.python.org/packages/source/h/h5py/h5py-2.4.0.tar.gz"
- version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758')
+ version('2.6.0', 'ec476211bd1de3f5ac150544189b0bf4')
version('2.5.0', '6e4301b5ad5da0d51b0a1e5ac19e3b74')
+ version('2.4.0', '80c9a94ae31f84885cc2ebe1323d6758')
+
+ variant('mpi', default=True, description='Build with MPI support')
+
+ # Build dependencies
+ depends_on('py-cython@0.19:', type='build')
+ depends_on('pkg-config', type='build')
+ depends_on('py-setuptools', type='build')
+ depends_on('hdf5@1.8.4:')
+ depends_on('hdf5+mpi', when='+mpi')
+ depends_on('mpi', when='+mpi')
+ depends_on('py-mpi4py', when='+mpi')
+
+ # Build and runtime dependencies
+ depends_on('py-numpy@1.6.1:', type=('build', 'run'))
+
+ # Runtime dependencies
+ depends_on('py-six', type=('build', 'run'))
+
+ phases = ['configure', 'install']
- extends('python', ignore=lambda f: re.match(r'bin/cy*', f))
- depends_on('hdf5')
- depends_on('py-numpy')
- depends_on('py-cython')
+ def configure(self, spec, prefix):
+ self.setup_py('configure', '--hdf5={0}'.format(spec['hdf5'].prefix))
- def install(self, spec, prefix):
- python('setup.py', 'configure', '--hdf5=%s' % spec['hdf5'].prefix)
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ if '+mpi' in spec:
+ env['CC'] = spec['mpi'].mpicc
+ self.setup_py('configure', '--mpi')
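
When a setup.py needs an extra step before installation, the recipe lists its phases explicitly and implements the non-default one with self.setup_py(), as the py-h5py hunk above does. A stripped-down sketch of that shape, with a hypothetical class name and only the phase machinery kept:

from spack import *


class PyH5pyLike(PythonPackage):
    """Hypothetical sketch of a custom 'configure' phase before 'install'."""

    phases = ['configure', 'install']

    depends_on('py-setuptools', type='build')
    depends_on('hdf5')

    def configure(self, spec, prefix):
        # setup_py() runs "python setup.py <args>" for this package; the
        # inherited 'install' phase then performs the actual installation.
        self.setup_py('configure', '--hdf5={0}'.format(spec['hdf5'].prefix))
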
diff --git a/var/spack/repos/builtin/packages/py-html5lib/package.py b/var/spack/repos/builtin/packages/py-html5lib/package.py
new file mode 100644
index 0000000000..1757b44297
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-html5lib/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyHtml5lib(PythonPackage):
+ """HTML parser based on the WHATWG HTML specification."""
+
+ homepage = "https://github.com/html5lib/html5lib-python"
+ url = "https://pypi.python.org/packages/ae/ae/bcb60402c60932b32dfaf19bb53870b29eda2cd17551ba5639219fb5ebf9/html5lib-0.9999999.tar.gz"
+
+ version('0.9999999', 'ef43cb05e9e799f25d65d1135838a96f')
+
+ depends_on('python@2.6:2.7,3.2:3.4')
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-imagesize/package.py b/var/spack/repos/builtin/packages/py-imagesize/package.py
new file mode 100644
index 0000000000..a2d08f6502
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-imagesize/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyImagesize(PythonPackage):
+ """Parses image file headers and returns image size. Supports PNG, JPEG,
+ JPEG2000, and GIF image file formats."""
+
+ homepage = "https://pypi.python.org/pypi/imagesize"
+ url = "https://pypi.python.org/packages/source/i/imagesize/imagesize-0.7.1.tar.gz"
+
+ version('0.7.1', '976148283286a6ba5f69b0f81aef8052',
+ url="https://pypi.python.org/packages/53/72/6c6f1e787d9cab2cc733cf042f125abec07209a58308831c9f292504e826/imagesize-0.7.1.tar.gz")
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-iminuit/package.py b/var/spack/repos/builtin/packages/py-iminuit/package.py
new file mode 100644
index 0000000000..0b93a0f2b8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-iminuit/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyIminuit(PythonPackage):
+ """Interactive IPython-Friendly Minimizer based on SEAL Minuit2."""
+
+ homepage = "https://pypi.python.org/pypi/iminuit"
+ url = "https://pypi.python.org/packages/source/i/iminuit/iminuit-1.2.tar.gz"
+
+ version('1.2', '4701ec472cae42015e26251703e6e984')
+
+ # Required dependencies
+ depends_on('py-setuptools', type='build')
+
+ # Optional dependencies
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-cython', type='build')
diff --git a/var/spack/repos/builtin/packages/py-ipdb/package.py b/var/spack/repos/builtin/packages/py-ipdb/package.py
new file mode 100644
index 0000000000..ab7982a76a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ipdb/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyIpdb(Package):
+ """ipdb is the iPython debugger and has many additional features, including
+ a better interactive debugging experience via colorized output."""
+
+ homepage = "https://pypi.python.org/pypi/ipdb"
+ url = "https://pypi.io/packages/source/i/ipdb/ipdb-0.10.1.tar.gz"
+
+ version('0.10.1', '4aeab65f633ddc98ebdb5eebf08dc713')
+
+ # :TODO:
+ # There might be potential to add variants here, but at the time of writing
+ # this the original packager does not know what they are. See the 3rd party
+ # section on ipdb's GitHub:
+ # https://github.com/gotcha/ipdb#third-party-support
+ extends('python')
+ depends_on('python@2.6:2.7,3.2:')
+
+ # Dependencies gathered from:
+ # https://github.com/gotcha/ipdb/blob/master/setup.py
+ # However additional dependencies added below were found via testing.
+ depends_on('py-setuptools', type='build')
+ # ipdb needs IPython and others available at runtime
+ depends_on('py-ipython@0.10.2:', type=('build', 'link'))
+ depends_on('py-traitlets', type=('build', 'link'))
+ depends_on('py-six', type=('build', 'link'))
+ depends_on('py-pexpect', type=('build', 'link'))
+ depends_on('py-prompt-toolkit', type=('build', 'link'))
+
+ def install(self, spec, prefix):
+ # Installation is uncomplicated, this should suffice.
+ setup_py('install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-ipykernel/package.py b/var/spack/repos/builtin/packages/py-ipykernel/package.py
new file mode 100644
index 0000000000..0303a8e43c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ipykernel/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyIpykernel(PythonPackage):
+ """IPython Kernel for Jupyter"""
+
+ homepage = "https://pypi.python.org/pypi/ipykernel"
+ url = "https://github.com/ipython/ipykernel/archive/4.5.0.tar.gz"
+
+ version('4.5.0', 'ea6aaf431b100452905aaca208edac72')
+ version('4.4.1', 'c0033e524aa9e05ed18879641ffe6e0f')
+ version('4.4.0', '8e626a1708ceff83412180d2ff2f3e57')
+ version('4.3.1', '971eee85d630eb4bafcd52531c79673f')
+ version('4.3.0', '5961164fe908faf798232a265ed48c73')
+ version('4.2.2', '4ac8ae11f1eef4920bf4a5383e13ab50')
+ version('4.2.1', 'de583ee9c84db6296269ce7de0afb63f')
+ version('4.2.0', 'fc535e4e020a41cd2b55508302b155bb')
+ version('4.1.1', '51376850c46fb006e1f8d1cd353507c5')
+ version('4.1.0', '638a43e4f8a15872f749090c3f0827b6')
+
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-traitlets@4.1.0:', type=('build', 'run'))
+ depends_on('py-tornado@4.0:', type=('build', 'run'))
+ depends_on('py-ipython@4.0:', type=('build', 'run'))
+ depends_on('py-jupyter-client', type=('build', 'run'))
+ depends_on('py-pexpect', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-ipython-genutils/package.py b/var/spack/repos/builtin/packages/py-ipython-genutils/package.py
new file mode 100644
index 0000000000..66e8a02130
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ipython-genutils/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyIpythonGenutils(PythonPackage):
+ """Vestigial utilities from IPython"""
+
+ homepage = "https://pypi.python.org/pypi/ipython_genutils"
+ url = "https://pypi.io/packages/source/i/ipython_genutils/ipython_genutils-0.1.0.tar.gz"
+
+ version('0.1.0', '9a8afbe0978adbcbfcb3b35b2d015a56')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
diff --git a/var/spack/repos/builtin/packages/py-ipython/package.py b/var/spack/repos/builtin/packages/py-ipython/package.py
index 7a94f90b38..277e090faa 100644
--- a/var/spack/repos/builtin/packages/py-ipython/package.py
+++ b/var/spack/repos/builtin/packages/py-ipython/package.py
@@ -24,17 +24,26 @@
##############################################################################
from spack import *
-class PyIpython(Package):
- """IPython provides a rich toolkit to help you make the most out of using Python interactively."""
+
+class PyIpython(PythonPackage):
+ """IPython provides a rich toolkit to help you make the most out of using
+ Python interactively."""
homepage = "https://pypi.python.org/pypi/ipython"
- url = "https://pypi.python.org/packages/source/i/ipython/ipython-2.3.1.tar.gz"
+ url = "https://pypi.io/packages/source/i/ipython/ipython-2.3.1.tar.gz"
- version('2.3.1', '2b7085525dac11190bfb45bb8ec8dcbf')
+ version('5.1.0', '47c8122420f65b58784cb4b9b4af35e3')
version('3.1.0', 'a749d90c16068687b0ec45a27e72ef8f')
+ version('2.3.1', '2b7085525dac11190bfb45bb8ec8dcbf')
+
+ depends_on('py-pygments', type=('build', 'run'))
+ depends_on('py-setuptools', type=('build', 'run'))
- extends('python')
- depends_on('py-pygments')
- depends_on('py-setuptools')
+ # These dependencies break concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-backports-shutil-get-terminal-size', when="^python@:3.2.999") # noqa
+ # depends_on('py-pathlib2', when="^python@:3.3.999")
+ depends_on('py-backports-shutil-get-terminal-size', type=('build', 'run'))
+ depends_on('py-pathlib2', type=('build', 'run'))
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-pickleshare', type=('build', 'run'))
+ depends_on('py-simplegeneric', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-ipywidgets/package.py b/var/spack/repos/builtin/packages/py-ipywidgets/package.py
new file mode 100644
index 0000000000..eafee8e084
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ipywidgets/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyIpywidgets(PythonPackage):
+ """IPython widgets for the Jupyter Notebook"""
+
+ homepage = "https://github.com/ipython/ipywidgets"
+ url = "https://github.com/ipython/ipywidgets/archive/5.2.2.tar.gz"
+
+ version('5.2.2', '112f3daa4aa0f42f8dda831cea3649c8')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('py-ipython@4.0.0:', type=('build', 'run'))
+ depends_on('py-ipykernel@4.2.2:', type=('build', 'run'))
+ depends_on('py-traitlets@4.2.1:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-jdcal/package.py b/var/spack/repos/builtin/packages/py-jdcal/package.py
index fd1d6b4419..964db1448f 100644
--- a/var/spack/repos/builtin/packages/py-jdcal/package.py
+++ b/var/spack/repos/builtin/packages/py-jdcal/package.py
@@ -24,15 +24,11 @@
##############################################################################
from spack import *
-class PyJdcal(Package):
+
+class PyJdcal(PythonPackage):
"""Julian dates from proleptic Gregorian and Julian calendars"""
homepage = 'http://github.com/phn/jdcal'
url = "https://pypi.python.org/packages/source/j/jdcal/jdcal-1.2.tar.gz"
version('1.2', 'ab8d5ba300fd1eb01514f363d19b1eb9')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-jinja2/package.py b/var/spack/repos/builtin/packages/py-jinja2/package.py
index 48312d9baf..eafe8c252b 100644
--- a/var/spack/repos/builtin/packages/py-jinja2/package.py
+++ b/var/spack/repos/builtin/packages/py-jinja2/package.py
@@ -22,30 +22,23 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyJinja2(Package):
- """
- Jinja2 is a template engine written in pure Python. It provides
+class PyJinja2(PythonPackage):
+ """Jinja2 is a template engine written in pure Python. It provides
a Django inspired non-XML syntax but supports inline expressions
- and an optional sandboxed environment.
- """
+ and an optional sandboxed environment."""
homepage = "http://jinja.pocoo.org/"
- url = "https://github.com/pallets/jinja/archive/2.8.tar.gz"
+ url = "https://pypi.python.org/packages/source/J/Jinja2/Jinja2-2.8.tar.gz"
- version('2.8', '4114200650d7630594e3bc70af23f59e')
- version('2.7.3', '55b87bdc8e585b8b5b86734eefce2621')
- version('2.7.2', '8e8f226809ae6363009b9296e30adf30')
- version('2.7.1', '69b6675553c81b1087f95cae7f2179bb')
- version('2.7', 'ec70433f325051dcedacbb2465028a35')
-
- extends("python")
- depends_on("py-setuptools")
- depends_on("py-markupsafe")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ version('2.8', 'edb51693fe22c53cee5403775c71a99e')
+ version('2.7.3', 'b9dffd2f3b43d673802fe857c8445b1a')
+ version('2.7.2', 'df1581455564e97010e38bc792012aa5')
+ version('2.7.1', '282aed153e69f970d6e76f78ed9d027a')
+ version('2.7', 'c2fb12cbbb523c57d3d15bfe4dc0e8fe')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-markupsafe', type=('build', 'run'))
+ depends_on('py-babel@0.8:', type=('build', 'run')) # optional, required for i18n
diff --git a/var/spack/repos/builtin/packages/py-jsonschema/package.py b/var/spack/repos/builtin/packages/py-jsonschema/package.py
new file mode 100644
index 0000000000..b1a0ac6606
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jsonschema/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyJsonschema(PythonPackage):
+ """Jsonschema: An(other) implementation of JSON Schema for Python."""
+
+ homepage = "http://github.com/Julian/jsonschema"
+ url = "https://pypi.python.org/packages/source/j/jsonschema/jsonschema-2.5.1.tar.gz"
+
+ version('2.5.1', '374e848fdb69a3ce8b7e778b47c30640')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-vcversioner', type=('build', 'run'))
+
+ # This dependency breaks concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-functools32', when="^python@2.7", type=('build', 'run'))
+ depends_on('py-functools32', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-jupyter-client/package.py b/var/spack/repos/builtin/packages/py-jupyter-client/package.py
new file mode 100644
index 0000000000..b0c7b06362
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jupyter-client/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyJupyterClient(PythonPackage):
+ """Jupyter protocol client APIs"""
+
+ homepage = "https://github.com/jupyter/jupyter_client"
+ url = "https://github.com/jupyter/jupyter_client/archive/4.4.0.tar.gz"
+
+ version('4.4.0', 'a0bd6fe6ba7c504fbc962a88a2a56a90')
+ version('4.3.0', '257d9f5429dac4d9511db84d201d3a9e')
+ version('4.2.2', '988ea87554215a83c6ad52e554d8d8c4')
+ version('4.2.1', '16994e5cace322c777456bc5a26502d7')
+ version('4.2.0', '61c43c9f243e42f1945fae5d56d0d23c')
+ version('4.1.1', '8436e4a3266a442f576cdfef39dc0e19')
+ version('4.1.0', 'cf42048b889c8434fbb5813a9eec1d34')
+ version('4.0.0', '00fa63c67cb3adf359d09dc4d803aff5')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('py-traitlets', type=('build', 'run'))
+ depends_on('py-jupyter-core', type=('build', 'run'))
+ depends_on('py-zmq@13:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-jupyter-console/package.py b/var/spack/repos/builtin/packages/py-jupyter-console/package.py
new file mode 100644
index 0000000000..a5f3e53298
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jupyter-console/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyJupyterConsole(PythonPackage):
+ """Jupyter Terminal Console"""
+
+ homepage = "https://github.com/jupyter/jupyter_console"
+ url = "https://github.com/jupyter/jupyter_console/archive/5.0.0.tar.gz"
+
+ version('5.0.0', '08a9fde32a45c9e2e0b4cec6eca249c2')
+ version('4.1.1', 'a8b077ae0a5c57e9518ac039ad5febb8')
+ version('4.1.0', '9c655076262760bdbeeada9d7f586237')
+ version('4.0.3', '0e928ea261e7f8154698cf69ed4f2459')
+ version('4.0.2', 'f2e174938c91136549b908bd39fa5d59')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('py-jupyter-client', type=('build', 'run'))
+ depends_on('py-ipython', type=('build', 'run'))
+ depends_on('py-ipykernel', type=('build', 'run'))
+ depends_on('py-pygments', type=('build', 'run'))
+ depends_on('py-prompt-toolkit@1.0.0:1.999.999', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-jupyter-core/package.py b/var/spack/repos/builtin/packages/py-jupyter-core/package.py
new file mode 100644
index 0000000000..f650a91bb9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jupyter-core/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyJupyterCore(PythonPackage):
+ """Core Jupyter functionality"""
+
+ homepage = "http://jupyter-core.readthedocs.io/"
+ url = "https://github.com/jupyter/jupyter_core/archive/4.2.0.tar.gz"
+
+ version('4.2.0', '25c1fc68b1b73c0a2e616c76f02bf061')
+ version('4.1.1', '2fce5ff60291bc01b39b1f00b3cbb784')
+ version('4.1.0', 'b7e928f965f68aef13fea1bf9d6384aa')
+ version('4.0.6', '50a73c3a4a8ed047a3674d2b5274cc3b')
+ version('4.0.5', 'c09bd3be58f141b49b90cdb2ba22f77f')
+ version('4.0.4', '5b6ca0e73bf559f4fe6106a6e412f913')
+ version('4.0.3', 'f2608f6e92f992ec8e37646b52c922a6')
+ version('4.0.2', 'ae0d0197c4febf43c050a97ac6277263')
+ version('4.0.1', 'f849136b2badaaba2a8a3b397bf04639')
+ version('4.0', 'b6b37cb4f40bd0fcd20433cb2cc7a4c1')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('py-traitlets', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-jupyter-notebook/package.py b/var/spack/repos/builtin/packages/py-jupyter-notebook/package.py
new file mode 100644
index 0000000000..4c0d12b245
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-jupyter-notebook/package.py
@@ -0,0 +1,61 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyJupyterNotebook(PythonPackage):
+ """Jupyter Interactive Notebook"""
+
+ homepage = "https://github.com/jupyter/notebook"
+ url = "https://github.com/jupyter/notebook/archive/4.2.3.tar.gz"
+
+ version('4.2.3', '5c6b0b1303adacd8972c4db21eda3e98')
+ version('4.2.2', '7f9717ae4fed930d187a44c0707b6379')
+ version('4.2.1', '4286f1eaf608257bd69cad4042c7c2fe')
+ version('4.2.0', '136be6b72fe9db7f0269dc7fa5652a62')
+ version('4.1.0', '763ab54b3fc69f6225b9659b6994e756')
+ version('4.0.6', 'd70d8a6d01893f4b64df9edbc0e13b52')
+ version('4.0.5', '2681a70e4c62aafe7ce69f1da5799ac8')
+ version('4.0.4', 'ab72f28f6af8107d71241a4110e92c05')
+ version('4.0.3', '119beea793865ee4b1673a50043ead2a')
+ version('4.0.2', '77f371e9a23a840d14d8a60fee7ba1b7')
+
+ variant('terminal', default=False, description="Enable terminal functionality")
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('npm', type='build')
+ depends_on('py-jinja2', type=('build', 'run'))
+ depends_on('py-tornado@4:', type=('build', 'run'))
+ depends_on('py-ipython-genutils', type=('build', 'run'))
+ depends_on('py-traitlets', type=('build', 'run'))
+ depends_on('py-jupyter-core', type=('build', 'run'))
+ depends_on('py-jupyter-client', type=('build', 'run'))
+ depends_on('py-jupyter-console', type=('build', 'run'))
+ depends_on('py-nbformat', type=('build', 'run'))
+ depends_on('py-nbconvert', type=('build', 'run'))
+ depends_on('py-ipykernel', type=('build', 'run'))
+ depends_on('py-terminado@0.3.3:', when="+terminal", type=('build', 'run'))
+ depends_on('py-ipywidgets', when="+terminal", type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-lockfile/package.py b/var/spack/repos/builtin/packages/py-lockfile/package.py
index 998b41abc6..1e57e6a1d7 100644
--- a/var/spack/repos/builtin/packages/py-lockfile/package.py
+++ b/var/spack/repos/builtin/packages/py-lockfile/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyLockfile(Package):
+
+class PyLockfile(PythonPackage):
"""The lockfile package exports a LockFile class which provides a
simple API for locking files. Unlike the Windows msvcrt.locking
function, the fcntl.lockf and flock functions, and the
@@ -40,8 +41,4 @@ class PyLockfile(Package):
version('0.10.2', '1aa6175a6d57f082cd12e7ac6102ab15')
- extends("python")
- depends_on("py-setuptools")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on("py-setuptools", type='build')
diff --git a/var/spack/repos/builtin/packages/py-logilab-common/package.py b/var/spack/repos/builtin/packages/py-logilab-common/package.py
index 8675c48e9a..4c20885760 100644
--- a/var/spack/repos/builtin/packages/py-logilab-common/package.py
+++ b/var/spack/repos/builtin/packages/py-logilab-common/package.py
@@ -22,21 +22,16 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyLogilabCommon(Package):
+class PyLogilabCommon(PythonPackage):
"""Common modules used by Logilab projects"""
homepage = "https://www.logilab.org/project/logilab-common"
url = "https://pypi.python.org/packages/a7/31/1650d23e44794d46935d82b86e73454cc83b814cbe1365260ccce8a2f4c6/logilab-common-1.2.0.tar.gz"
version('1.2.0', 'f7b51351b7bfe052746fa04c03253c0b')
- extends("python")
- depends_on("py-setuptools")
- depends_on("py-six")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
+ extends('python', ignore=r'bin/pytest')
+ depends_on("py-setuptools", type='build')
+ depends_on("py-six", type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-macs2/package.py b/var/spack/repos/builtin/packages/py-macs2/package.py
new file mode 100644
index 0000000000..42318faa2a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-macs2/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class PyMacs2(PythonPackage):
+ """MACS2 Model-based Analysis of ChIP-Seq"""
+
+ homepage = "https://github.com/taoliu/MACS"
+ url = "https://pypi.python.org/packages/9f/99/a8ac96b357f6b0a6f559fe0f5a81bcae12b98579551620ce07c5183aee2c/MACS2-2.1.1.20160309.tar.gz"
+
+ version('2.1.1.20160309', '2008ba838f83f34f8e0fddefe2a3a0159f4a740707c68058f815b31ddad53d26')
+
+ depends_on('python@2.7:2.8')
+
+ # Most Python packages only require py-setuptools as a build dependency.
+ # However, py-macs2 requires py-setuptools during runtime as well.
+ depends_on('py-setuptools', type=('build', 'run'))
+ depends_on('py-numpy@1.6:', type=('build', 'run'))
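
The comment above is the key distinction behind these conversions: a build-type
dependency only has to be available while setup.py runs, while a
('build', 'run') dependency must also be available whenever the installed
package is actually used. A short illustration with hypothetical dependencies:

    # Needed only while setup.py executes; not part of the runtime environment.
    depends_on('py-setuptools', type='build')

    # Needed to build *and* whenever the installed module is imported,
    # for example because the installed code imports it at run time.
    depends_on('py-numpy', type=('build', 'run'))
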
diff --git a/var/spack/repos/builtin/packages/py-mako/package.py b/var/spack/repos/builtin/packages/py-mako/package.py
index a03ef4defb..0707d0b12f 100644
--- a/var/spack/repos/builtin/packages/py-mako/package.py
+++ b/var/spack/repos/builtin/packages/py-mako/package.py
@@ -24,17 +24,18 @@
##############################################################################
from spack import *
-class PyMako(Package):
- """A super-fast templating language that borrows the best
- ideas from the existing templating languages."""
+
+class PyMako(PythonPackage):
+ """A super-fast templating language that borrows the best
+ ideas from the existing templating languages."""
homepage = "https://pypi.python.org/pypi/mako"
- url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz"
+ url = "https://pypi.python.org/packages/source/M/Mako/Mako-1.0.1.tar.gz"
+ version('1.0.4', 'c5fc31a323dd4990683d2f2da02d4e20')
version('1.0.1', '9f0aafd177b039ef67b90ea350497a54')
- depends_on('py-setuptools')
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
+ # depends_on('py-mock', type='test') # TODO: Add test deptype
+ # depends_on('py-pytest', type='test') # TODO: Add test deptype
+ depends_on('py-markupsafe@0.9.2:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-markdown/package.py b/var/spack/repos/builtin/packages/py-markdown/package.py
new file mode 100644
index 0000000000..23c8167021
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-markdown/package.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class PyMarkdown(PythonPackage):
+ """This is a Python implementation of John Gruber's Markdown. It is
+ almost completely compliant with the reference implementation, though
+ there are a few very minor differences. See John's Syntax
+ Documentation for the syntax rules.
+ """
+
+ homepage = "https://pythonhosted.org/Markdown/"
+ url = "https://github.com/waylan/Python-Markdown/archive/2.6.7-final.tar.gz"
+
+ version('2.6.7', 'fd27044042e197ad99249b3d60215d97')
+ version('2.6.6', '2b47a0ff7eb19ef34453fe198a0cccc4')
+ version('2.6.5', 'e4b6b65b2d6bcac07176fb209bc55614')
+ version('2.6.4', '5fb3cd9945eb534e71af597f8ee3622b')
+ version('2.6.3', 'ec7a50ce9fd4a5fd0b24555d47e9d7d1')
+ version('2.6.2', '6ce86913e9bf5bb34d9ee394ac71f044')
+ version('2.6.1', '0ae69693c5adb27caf0160941d7dcbdf')
+ version('2.6', '9acdde43d99847d0c4ef03ea56b1d2c5')
+ version('2.5.2', 'ed2a662d22799186c1ef85d173d38b8a')
+ version('2.5.1', 'be6f6ba65a8fb843d2aaf1fcdd68c755')
+ version('2.5', '8393ceab9c6e33357fb8a7be063a4849')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.8,3.2:3.4')
+
+ def url_for_version(self, version):
+ base_url = "https://github.com/waylan/Python-Markdown/archive"
+ return "{0}/{1}-final.tar.gz".format(base_url, version)
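
The url_for_version hook above is needed because this project's release
tarballs carry a '-final' suffix, which Spack could not extrapolate from the
package-level `url` alone. The string formatting can be checked on its own:

    # Standalone check of the URL template used by url_for_version above.
    base_url = "https://github.com/waylan/Python-Markdown/archive"
    for version in ['2.6.7', '2.5']:
        print("{0}/{1}-final.tar.gz".format(base_url, version))
    # https://github.com/waylan/Python-Markdown/archive/2.6.7-final.tar.gz
    # https://github.com/waylan/Python-Markdown/archive/2.5-final.tar.gz
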
diff --git a/var/spack/repos/builtin/packages/py-markupsafe/package.py b/var/spack/repos/builtin/packages/py-markupsafe/package.py
index 58c9f70066..a31e3972de 100644
--- a/var/spack/repos/builtin/packages/py-markupsafe/package.py
+++ b/var/spack/repos/builtin/packages/py-markupsafe/package.py
@@ -22,30 +22,22 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyMarkupsafe(Package):
- """
- MarkupSafe is a library for Python that implements a unicode
- string that is aware of HTML escaping rules and can be used
- to implement automatic string escaping. It is used by Jinja 2,
- the Mako templating engine, the Pylons web framework and many more.
- """
+class PyMarkupsafe(PythonPackage):
+ """MarkupSafe is a library for Python that implements a unicode
+ string that is aware of HTML escaping rules and can be used to
+ implement automatic string escaping. It is used by Jinja 2, the
+ Mako templating engine, the Pylons web framework and many more."""
homepage = "http://www.pocoo.org/projects/markupsafe/"
- url = "https://github.com/pallets/markupsafe/archive/0.23.tar.gz"
+ url = "https://pypi.python.org/packages/source/M/MarkupSafe/MarkupSafe-0.23.tar.gz"
- version('0.23', '1a0dadc95169832367c9dcf142155cde')
- version('0.22', '7a2ac7427b58def567628d06dc328396')
- version('0.21', 'aebcd93ee05269773c8b80bb6c86fc2f')
- version('0.20', '0c1fef97c8fd6a986d708f08d7f84a02')
- version('0.19', '64b05361adb92c11839fc470e308c593')
-
- extends("python")
- depends_on("py-setuptools")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ version('0.23', 'f5ab3deee4c37cd6a922fb81e730da6e')
+ version('0.22', 'cb3ec29fd5361add24cfd0c6e2953b3e')
+ version('0.21', 'fde838d9337fa51744283f46a1db2e74')
+ version('0.20', '7da066d9cb191a70aa85d0a3d43565d1')
+ version('0.19', 'ccb3f746c807c5500850987006854a6d')
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-matplotlib/package.py b/var/spack/repos/builtin/packages/py-matplotlib/package.py
index 14f9716ae6..d808b0fc4b 100644
--- a/var/spack/repos/builtin/packages/py-matplotlib/package.py
+++ b/var/spack/repos/builtin/packages/py-matplotlib/package.py
@@ -25,55 +25,98 @@
from spack import *
import os
-class PyMatplotlib(Package):
- """Python plotting package."""
+
+class PyMatplotlib(PythonPackage):
+ """matplotlib is a python 2D plotting library which produces publication
+ quality figures in a variety of hardcopy formats and interactive
+ environments across platforms."""
+
homepage = "https://pypi.python.org/pypi/matplotlib"
- url = "https://pypi.python.org/packages/source/m/matplotlib/matplotlib-1.4.2.tar.gz"
+ url = "https://pypi.io/packages/source/m/matplotlib/matplotlib-1.4.2.tar.gz"
- version('1.4.2', '7d22efb6cce475025733c50487bd8898')
+ version('1.5.3', 'ba993b06113040fee6628d74b80af0fd')
+ version('1.5.1', 'f51847d8692cb63df64cd0bd0304fd20')
version('1.4.3', '86af2e3e3c61849ac7576a6f5ca44267')
+ version('1.4.2', '7d22efb6cce475025733c50487bd8898')
+
+ # See: http://matplotlib.org/users/installing.html
- variant('gui', default=False, description='Enable GUI')
+ # Variants enabled by default for a standard configuration
+ variant('tk', default=True, description='Enable Tk GUI')
+ variant('image', default=True,
+ description='Enable reading/saving JPEG, BMP and TIFF files')
+
+ # Variants optionally available to user
variant('ipython', default=False, description='Enable ipython support')
+ variant('qt', default=False, description='Enable Qt GUI')
+ variant('latex', default=False,
+ description='Enable LaTeX text rendering support')
+ variant('animation', default=False,
+ description='Enable animation support')
+ # Python 2.7, 3.4, or 3.5
extends('python', ignore=r'bin/nosetests.*$|bin/pbr$')
- depends_on('py-pyside', when='+gui')
+ # ------ Required dependencies
+ depends_on('py-setuptools', type='build')
+
+ depends_on('libpng@1.2:')
+ depends_on('freetype@2.3:')
+
+ depends_on('py-numpy@1.6:', type=('build', 'run'))
+ depends_on('py-dateutil@1.1:', type=('build', 'run'))
+ depends_on('py-pyparsing', type=('build', 'run'))
+ depends_on('py-pytz', type=('build', 'run'))
+ depends_on('py-cycler@0.9:', type=('build', 'run'))
+
+ # ------ Optional GUI frameworks
+ depends_on('tk@8.3:', when='+tk') # not 8.6.0 or 8.6.1
+ depends_on('qt', when='+qt')
+ depends_on('py-pyside', when='+qt', type=('build', 'run'))
+
+ # --------- Optional external programs
+ # ffmpeg/avconv or mencoder
+ depends_on('image-magick', when='+animation')
+
+ # --------- Optional dependencies
+ depends_on('pkg-config', type='build') # why not...
+ depends_on('pil', when='+image', type=('build', 'run'))
depends_on('py-ipython', when='+ipython')
- depends_on('py-pyparsing')
- depends_on('py-six')
- depends_on('py-dateutil')
- depends_on('py-pytz')
- depends_on('py-nose')
- depends_on('py-numpy')
- depends_on('py-mock')
- depends_on('py-pbr')
- depends_on('py-funcsigs')
-
- depends_on('pkg-config')
- depends_on('freetype')
- depends_on('qt', when='+gui')
- depends_on('bzip2')
- depends_on('tcl', when='+gui')
- depends_on('tk', when='+gui')
- depends_on('qhull')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
- if str(self.version) in ['1.4.2', '1.4.3']:
- # hack to fix configuration file
+ depends_on('ghostscript', when='+latex', type='run')
+ depends_on('texlive', when='+latex', type='run')
+
+ # Testing dependencies
+ depends_on('py-nose') # type='test'
+ depends_on('py-mock') # type='test'
+
+ # Required libraries that ship with matplotlib
+ # depends_on('agg@2.4:')
+ depends_on('qhull@2012.1:')
+ # depends_on('ttconv')
+ depends_on('py-six@1.9.0:', type=('build', 'run'))
+
+ @PythonPackage.sanity_check('install')
+ def set_backend(self):
+ spec = self.spec
+ prefix = self.prefix
+
+ if '+qt' in spec or '+tk' in spec:
+ # Set backend in matplotlib configuration file
config_file = None
- for p,d,f in os.walk(prefix.lib):
+ for p, d, f in os.walk(prefix.lib):
for file in f:
if file.find('matplotlibrc') != -1:
config_file = join_path(p, 'matplotlibrc')
- print config_file
- if config_file == None:
- raise InstallError('could not find config file')
- filter_file(r'backend : pyside',
- 'backend : Qt4Agg',
- config_file)
- filter_file(r'#backend.qt4 : PyQt4',
- 'backend.qt4 : PySide',
- config_file)
+ if not config_file:
+ raise InstallError('Could not find matplotlibrc')
+
+ kwargs = {'ignore_absent': False, 'backup': False, 'string': False}
+ rc = FileFilter(config_file)
+
+ # Only make Qt4 be the default backend if Tk is turned off
+ if '+qt' in spec and '+tk' not in spec:
+ rc.filter('^backend.*', 'backend : Qt4Agg', **kwargs)
+
+ # Additional options in case user is doing Qt4:
+ if '+qt' in spec:
+ rc.filter('^#backend.qt4.*', 'backend.qt4 : PySide', **kwargs)
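
The set_backend hook above runs after the install phase and rewrites the
installed matplotlibrc with Spack's FileFilter helper (regex filtering, no
backup files). Ignoring the variant conditions, the effect on the file is the
same as this plain-Python sketch:

    import re

    def set_default_backend(config_file, backend='Qt4Agg'):
        """Rewrite matplotlibrc so `backend` becomes the default."""
        with open(config_file) as f:
            lines = f.readlines()
        with open(config_file, 'w') as f:
            for line in lines:
                # rc.filter('^backend.*', 'backend : Qt4Agg', ...)
                line = re.sub(r'^backend.*', 'backend : {0}'.format(backend), line)
                # rc.filter('^#backend.qt4.*', 'backend.qt4 : PySide', ...)
                line = re.sub(r'^#backend.qt4.*', 'backend.qt4 : PySide', line)
                f.write(line)
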
diff --git a/var/spack/repos/builtin/packages/py-mccabe/package.py b/var/spack/repos/builtin/packages/py-mccabe/package.py
new file mode 100644
index 0000000000..c413193cdc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-mccabe/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyMccabe(PythonPackage):
+ """Ned's script to check McCabe complexity."""
+
+ homepage = "https://github.com/PyCQA/mccabe"
+ url = "https://github.com/PyCQA/mccabe/archive/0.5.2.tar.gz"
+
+ version('0.5.2', '3cdf2d7faa1464b18905fe9a7063a632')
+ version('0.5.1', '864b364829156701bec797712be8ece0')
+ version('0.5.0', '71c0ce5e5c4676753525154f6c5d3af8')
+ version('0.4.0', '9cf5712e5f1785aaa27273a4328babe4')
+ version('0.3.1', '45c48c0978e6fc1f31fedcb918178abb')
+ version('0.3', 'c583f58ea28be12842c001473d77504d')
+ version('0.2.1', 'fcba311ebd999f48359a8ab28da94b30')
+ version('0.2', '36d4808c37e187dbb1fe2373a0ac6645')
+ version('0.1', '3c9e8e72612a9c01d865630cc569150a')
+
+ depends_on('python@2.7:2.8,3.3:')
+
+ depends_on('py-setuptools', type='build')
+
+ # TODO: Add test dependencies
+ # depends_on('py-pytest', type='test')
diff --git a/var/spack/repos/builtin/packages/py-meep/package.py b/var/spack/repos/builtin/packages/py-meep/package.py
new file mode 100644
index 0000000000..0ebba77ac6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-meep/package.py
@@ -0,0 +1,89 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyMeep(PythonPackage):
+ """Python-meep is a wrapper around libmeep. It allows the scripting of
+ Meep-simulations with Python"""
+
+ homepage = "https://launchpad.net/python-meep"
+ url = "https://launchpad.net/python-meep/1.4/1.4/+download/python-meep-1.4.2.tar"
+
+ version('1.4.2', 'f8913542d18b0dda92ebc64f0a10ce56')
+
+ variant('mpi', default=True, description='Enable MPI support')
+
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+
+ depends_on('mpi', when='+mpi')
+ depends_on('meep~mpi', when='~mpi')
+ depends_on('meep+mpi', when='+mpi')
+
+ # As of SWIG 3.0.3, Python-style comments are now treated as
+ # pre-processor directives. Use older SWIG. But not too old,
+ # or else it can't handle newer C++ compilers and flags.
+ depends_on('swig@1.3.39:3.0.2')
+
+ phases = ['clean', 'build_ext', 'install', 'bdist']
+
+ def setup_file(self, spec, prefix):
+ return 'setup-mpi.py' if '+mpi' in spec else 'setup.py'
+
+ def common_args(self, spec, prefix):
+ include_dirs = [
+ spec['meep'].prefix.include,
+ spec['py-numpy'].include
+ ]
+
+ library_dirs = [
+ spec['meep'].prefix.lib
+ ]
+
+ if '+mpi' in spec:
+ include_dirs.append(spec['mpi'].prefix.include)
+ library_dirs.append(spec['mpi'].prefix.lib)
+
+ include_flags = '-I{0}'.format(','.join(include_dirs))
+ library_flags = '-L{0}'.format(','.join(library_dirs))
+
+ # FIXME: For some reason, this stopped working.
+ # The -I and -L are no longer being properly forwarded to setup.py:
+ # meep_common.i:87: Error: Unable to find 'meep/mympi.hpp'
+ # meep_common.i:88: Error: Unable to find 'meep/vec.hpp'
+ # meep_common.i:89: Error: Unable to find 'meep.hpp'
+
+ return [include_flags, library_flags]
+
+ def clean_args(self, spec, prefix):
+ return ['--all']
+
+ def build_ext_args(self, spec, prefix):
+ return self.common_args(spec, prefix)
+
+ def bdist_args(self, spec, prefix):
+ return self.common_args(spec, prefix)
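
The `phases` list above replaces PythonPackage's default build/install sequence,
and each `<phase>_args` hook supplies the extra arguments for that setup.py
command, so a '+mpi' build is intended to run roughly
`python setup-mpi.py build_ext -I<dirs> -L<dirs>` and so on. The flag assembly
in common_args can be reproduced standalone (placeholder paths; in the package
they come from the concretized spec):

    include_dirs = ['/path/to/meep/include', '/path/to/numpy-egg/numpy/core/include']
    library_dirs = ['/path/to/meep/lib']

    mpi = True  # corresponds to the '+mpi' variant
    if mpi:
        include_dirs.append('/path/to/mpi/include')
        library_dirs.append('/path/to/mpi/lib')

    # Note the single -I/-L with comma-separated directories; the FIXME above
    # reports that these flags are no longer reaching setup.py correctly.
    include_flags = '-I{0}'.format(','.join(include_dirs))
    library_flags = '-L{0}'.format(','.join(library_dirs))
    print(include_flags, library_flags)
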
diff --git a/var/spack/repos/builtin/packages/py-mistune/package.py b/var/spack/repos/builtin/packages/py-mistune/package.py
index 9bcbb5a927..cc859d4b78 100644
--- a/var/spack/repos/builtin/packages/py-mistune/package.py
+++ b/var/spack/repos/builtin/packages/py-mistune/package.py
@@ -22,11 +22,10 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyMistune(Package):
+class PyMistune(PythonPackage):
"""
Python markdown parser
"""
@@ -39,8 +38,4 @@ class PyMistune(Package):
version('0.5.1', '1c6cfce28a4aa90cf125217cd6c6fe6c')
version('0.5', '997736554f1f95eea78c66ae339b5722')
- extends('python')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-mock/package.py b/var/spack/repos/builtin/packages/py-mock/package.py
index 0587131b88..21edbd1dc0 100644
--- a/var/spack/repos/builtin/packages/py-mock/package.py
+++ b/var/spack/repos/builtin/packages/py-mock/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyMock(Package):
+
+class PyMock(PythonPackage):
"""mock is a library for testing in Python. It allows you to replace parts
of your system under test with mock objects and make assertions about how
they have been used."""
@@ -34,9 +35,5 @@ class PyMock(Package):
version('1.3.0', '73ee8a4afb3ff4da1b4afa287f39fdeb')
- extends('python')
- depends_on('py-pbr')
- depends_on('py-setuptools@17.1:')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-pbr', type=('build', 'run'))
+ depends_on('py-setuptools@17.1:', type='build')
diff --git a/var/spack/repos/builtin/packages/py-monotonic/package.py b/var/spack/repos/builtin/packages/py-monotonic/package.py
new file mode 100644
index 0000000000..b02f954ccc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-monotonic/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyMonotonic(PythonPackage):
+ """An implementation of time.monotonic() for Python 2 & < 3.3"""
+
+ homepage = "https://pypi.python.org/pypi/monotonic"
+ url = "https://pypi.io/packages/source/m/monotonic/monotonic-1.2.tar.gz"
+
+ version('1.2', 'd14c93aabc3d6af25ef086b032b123cf')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-mpi4py/package.py b/var/spack/repos/builtin/packages/py-mpi4py/package.py
index 2764b8b3c6..7f8dc6b986 100644
--- a/var/spack/repos/builtin/packages/py-mpi4py/package.py
+++ b/var/spack/repos/builtin/packages/py-mpi4py/package.py
@@ -24,17 +24,19 @@
##############################################################################
from spack import *
-class PyMpi4py(Package):
- """This package provides Python bindings for the Message Passing Interface (MPI) standard. It is implemented on top of the MPI-1/MPI-2 specification and exposes an API which grounds on the standard MPI-2 C++ bindings."""
+
+class PyMpi4py(PythonPackage):
+ """This package provides Python bindings for the Message Passing
+ Interface (MPI) standard. It is implemented on top of the
+ MPI-1/MPI-2 specification and exposes an API which grounds on the
+ standard MPI-2 C++ bindings.
+
+ """
homepage = "https://pypi.python.org/pypi/mpi4py"
url = "https://pypi.python.org/packages/source/m/mpi4py/mpi4py-1.3.1.tar.gz"
version('2.0.0', '4f7d8126d7367c239fd67615680990e3')
version('1.3.1', 'dbe9d22bdc8ed965c23a7ceb6f32fc3c')
- extends('python')
- depends_on('py-setuptools')
+ depends_on('py-setuptools', type='build')
depends_on('mpi')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-mpmath/package.py b/var/spack/repos/builtin/packages/py-mpmath/package.py
index 899ff053a9..d379e0bd03 100644
--- a/var/spack/repos/builtin/packages/py-mpmath/package.py
+++ b/var/spack/repos/builtin/packages/py-mpmath/package.py
@@ -24,14 +24,10 @@
##############################################################################
from spack import *
-class PyMpmath(Package):
+
+class PyMpmath(PythonPackage):
"""A Python library for arbitrary-precision floating-point arithmetic."""
homepage = "http://mpmath.org"
url = "https://pypi.python.org/packages/source/m/mpmath/mpmath-all-0.19.tar.gz"
version('0.19', 'd1b7e19dd6830d0d7b5e1bc93d46c02c')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-mx/package.py b/var/spack/repos/builtin/packages/py-mx/package.py
index d0f9f7cadf..9af74555b1 100644
--- a/var/spack/repos/builtin/packages/py-mx/package.py
+++ b/var/spack/repos/builtin/packages/py-mx/package.py
@@ -24,14 +24,15 @@
##############################################################################
from spack import *
-class PyMx(Package):
- """The eGenix.com mx Base Distribution for Python is a collection of professional quality software tools which enhance Python's usability in many important areas such as fast text searching, date/time processing and high speed data types."""
+
+class PyMx(PythonPackage):
+ """The eGenix.com mx Base Distribution for Python is a collection of
+ professional quality software tools which enhance Python's
+ usability in many important areas such as fast text searching,
+ date/time processing and high speed data types.
+
+ """
homepage = "http://www.egenix.com/products/python/mxBase/"
url = "https://downloads.egenix.com/python/egenix-mx-base-3.2.8.tar.gz"
version('3.2.8', '9d9d3a25f9dc051a15e97f452413423b')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-mysqldb1/package.py b/var/spack/repos/builtin/packages/py-mysqldb1/package.py
index 9918ba3e64..8fd794aadb 100644
--- a/var/spack/repos/builtin/packages/py-mysqldb1/package.py
+++ b/var/spack/repos/builtin/packages/py-mysqldb1/package.py
@@ -24,16 +24,15 @@
##############################################################################
from spack import *
-class PyMysqldb1(Package):
+
+class PyMysqldb1(PythonPackage):
"""Legacy mysql bindings for python"""
homepage = "https://github.com/farcepest/MySQLdb1"
url = "https://github.com/farcepest/MySQLdb1/archive/MySQLdb-1.2.5.tar.gz"
- version('1.2.5', '332c8f4955b6bc0c79ea15170bf7321b')
-
- extends('python')
- depends_on('py-setuptools')
+ version('1.2.5', '332c8f4955b6bc0c79ea15170bf7321b',
+ url="https://github.com/farcepest/MySQLdb1/archive/MySQLdb-1.2.5.tar.gz")
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ # FIXME: Missing dependency on mysql
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-nbconvert/package.py b/var/spack/repos/builtin/packages/py-nbconvert/package.py
new file mode 100644
index 0000000000..0e221d355c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-nbconvert/package.py
@@ -0,0 +1,55 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyNbconvert(PythonPackage):
+ """Jupyter Notebook Conversion"""
+
+ homepage = "https://github.com/jupyter/nbconvert"
+ url = "https://github.com/jupyter/nbconvert/archive/4.2.0.tar.gz"
+
+ version('4.2.0', '8bd88771cc00f575d5edcd0b5197f964')
+ version('4.1.0', '06655576713ba1ff7cece2b92760c187')
+ version('4.0.0', '9661620b1e10a7b46f314588d2d0932f')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pycurl', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('py-mistune', type=('build', 'run'))
+ depends_on('py-jinja2', type=('build', 'run'))
+ depends_on('py-pygments', type=('build', 'run'))
+ depends_on('py-traitlets', type=('build', 'run'))
+ depends_on('py-jupyter-core', type=('build', 'run'))
+ depends_on('py-nbformat', type=('build', 'run'))
+ depends_on('py-entrypoints', type=('build', 'run'))
+ depends_on('py-tornado', type=('build', 'run'))
+ depends_on('py-jupyter-client', type=('build', 'run'))
+
+ # FIXME:
+ # Failed, try again after installing PycURL with `pip install pycurl` to avoid outdated SSL. # noqa
+ # Failed to download css from https://cdn.jupyter.org/notebook/4.1.0/style/style.min.css: [Errno socket error] [SSL: CERTIFICATE_VERIFY_FAILED] certificate verify failed (_ssl.c:661) # noqa
+ # Downloading CSS: https://cdn.jupyter.org/notebook/4.1.0/style/style.min.css # noqa
+ # error: Need Notebook CSS to proceed: nbconvert/resources/style.min.css
diff --git a/var/spack/repos/builtin/packages/py-nbformat/package.py b/var/spack/repos/builtin/packages/py-nbformat/package.py
new file mode 100644
index 0000000000..4ecf7f8fcf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-nbformat/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyNbformat(PythonPackage):
+ """The Jupyter Notebook format"""
+
+ homepage = "https://github.com/jupyter/nbformat"
+ url = "https://github.com/jupyter/nbformat/archive/4.1.0.tar.gz"
+
+ version('4.1.0', '826b4fc4ec42553b20144f53b57b4e7b')
+ version('4.0.1', 'ab7172e517c9d561c0c01eef5631b4c8')
+ version('4.0.0', '7cf61359fa4e9cf3ef5e969e2fcb909e')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-ipython-genutils', type=('build', 'run'))
+ depends_on('py-traitlets', type=('build', 'run'))
+ depends_on('py-jsonschema', type=('build', 'run'))
+ depends_on('py-jupyter-core', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-nestle/package.py b/var/spack/repos/builtin/packages/py-nestle/package.py
new file mode 100644
index 0000000000..22dc9debe1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-nestle/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyNestle(PythonPackage):
+ """Nested sampling algorithms for evaluating Bayesian evidence."""
+
+ homepage = "http://kbarbary.github.io/nestle/"
+ url = "https://pypi.python.org/packages/source/n/nestle/nestle-0.1.1.tar.gz"
+
+ version('0.1.1', '4875c0f9a0a8e263c1d7f5fa6ce604c5')
+
+ # Required dependencies
+ depends_on('py-numpy', type=('build', 'run'))
+
+ # Optional dependencies
+ depends_on('py-scipy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-netcdf/package.py b/var/spack/repos/builtin/packages/py-netcdf/package.py
index e3f857a0ce..2d35320ca0 100644
--- a/var/spack/repos/builtin/packages/py-netcdf/package.py
+++ b/var/spack/repos/builtin/packages/py-netcdf/package.py
@@ -24,17 +24,15 @@
##############################################################################
from spack import *
-class PyNetcdf(Package):
+
+class PyNetcdf(PythonPackage):
"""Python interface to the netCDF Library."""
homepage = "http://unidata.github.io/netcdf4-python"
url = "https://github.com/Unidata/netcdf4-python/tarball/v1.2.3.1rel"
version('1.2.3.1', '4fc4320d4f2a77b894ebf8da1c9895af')
- extends('python')
- depends_on('py-numpy')
- depends_on('py-cython')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-cython', type=('build', 'run'))
+ depends_on('py-setuptools', type=('build', 'run'))
depends_on('netcdf')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-networkx/package.py b/var/spack/repos/builtin/packages/py-networkx/package.py
index 6bdb24e36e..6eca70c15c 100644
--- a/var/spack/repos/builtin/packages/py-networkx/package.py
+++ b/var/spack/repos/builtin/packages/py-networkx/package.py
@@ -24,16 +24,14 @@
##############################################################################
from spack import *
-class PyNetworkx(Package):
- """NetworkX is a Python package for the creation, manipulation, and study of the structure, dynamics, and functions of complex networks."""
+
+class PyNetworkx(PythonPackage):
+ """NetworkX is a Python package for the creation, manipulation, and study
+ of the structure, dynamics, and functions of complex networks."""
homepage = "http://networkx.github.io/"
url = "https://pypi.python.org/packages/source/n/networkx/networkx-1.11.tar.gz"
version('1.11', '6ef584a879e9163013e9a762e1cf7cd1')
- extends('python')
-
- depends_on('py-decorator')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-decorator', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-nose/package.py b/var/spack/repos/builtin/packages/py-nose/package.py
index abb2d500a2..050a018ffa 100644
--- a/var/spack/repos/builtin/packages/py-nose/package.py
+++ b/var/spack/repos/builtin/packages/py-nose/package.py
@@ -25,7 +25,7 @@
from spack import *
-class PyNose(Package):
+class PyNose(PythonPackage):
"""nose extends the test loading and running features of unittest,
making it easier to write, find and run tests."""
@@ -36,8 +36,4 @@ class PyNose(Package):
version('1.3.6', '0ca546d81ca8309080fc80cb389e7a16')
version('1.3.7', '4d3ad0ff07b61373d2cefc89c5d0b20b')
- extends('python', ignore=r'bin/nosetests.*$')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-numexpr/package.py b/var/spack/repos/builtin/packages/py-numexpr/package.py
index d8bacd179f..ee89820f5b 100644
--- a/var/spack/repos/builtin/packages/py-numexpr/package.py
+++ b/var/spack/repos/builtin/packages/py-numexpr/package.py
@@ -23,18 +23,18 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import re
-class PyNumexpr(Package):
+
+class PyNumexpr(PythonPackage):
"""Fast numerical expression evaluator for NumPy"""
homepage = "https://pypi.python.org/pypi/numexpr"
url = "https://pypi.python.org/packages/source/n/numexpr/numexpr-2.4.6.tar.gz"
+ version('2.6.1', '6365245705b446426df9543ad218dd8e',
+ url="https://pypi.python.org/packages/c6/f0/11628fa4d332d8fe9ab0ba8e9bfe0e065fb6b5324859171ee72d84e079c0/numexpr-2.6.1.tar.gz")
+ version('2.5', '84f66cced45ba3e30dcf77a937763aaa')
version('2.4.6', '17ac6fafc9ea1ce3eb970b9abccb4fbd')
- version('2.5', '84f66cced45ba3e30dcf77a937763aaa')
-
- extends('python')
- depends_on('py-numpy')
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('python@2.6:')
+ depends_on('py-numpy@1.6:', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
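
The 2.6.1 entry above carries its own `url=` because that release sits under a
hash-prefixed path on PyPI that the package-level `url` template cannot
produce; the older versions are still fetched from the template. The pattern in
isolation (hypothetical package):

    from spack import *


    class PyExampleUrls(PythonPackage):
        """Hypothetical package with one irregular release URL."""

        homepage = "https://example.com/py-example-urls"
        url      = "https://example.com/releases/py-example-urls-1.0.tar.gz"

        # Fetched from the package-level `url` with the version substituted:
        version('1.0', '00000000000000000000000000000000')
        # Fetched from its own URL, overriding the template for this release:
        version('1.1', '11111111111111111111111111111111',
                url='https://example.com/other/path/py-example-urls-1.1.tar.gz')
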
diff --git a/var/spack/repos/builtin/packages/py-numpy/package.py b/var/spack/repos/builtin/packages/py-numpy/package.py
index e9672b7bb0..25a5f04b8c 100644
--- a/var/spack/repos/builtin/packages/py-numpy/package.py
+++ b/var/spack/repos/builtin/packages/py-numpy/package.py
@@ -23,46 +23,62 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import platform
-class PyNumpy(Package):
+
+class PyNumpy(PythonPackage):
"""NumPy is the fundamental package for scientific computing with Python.
It contains among other things: a powerful N-dimensional array object,
sophisticated (broadcasting) functions, tools for integrating C/C++ and
Fortran code, and useful linear algebra, Fourier transform, and random
number capabilities"""
+
homepage = "http://www.numpy.org/"
- url = "https://pypi.python.org/packages/source/n/numpy/numpy-1.9.1.tar.gz"
+ url = "https://pypi.io/packages/source/n/numpy/numpy-1.9.1.tar.gz"
+ version('1.11.2', '03bd7927c314c43780271bf1ab795ebc')
+ version('1.11.1', '2f44a895a8104ffac140c3a70edbd450')
version('1.11.0', 'bc56fb9fc2895aa4961802ffbdb31d0b')
version('1.10.4', 'aed294de0aa1ac7bd3f9745f4f1968ad')
version('1.9.2', 'a1ed53432dbcd256398898d35bc8e645')
version('1.9.1', '78842b73560ec378142665e712ae4ad9')
-
variant('blas', default=True)
variant('lapack', default=True)
- extends('python')
- depends_on('py-nose')
+ depends_on('python@2.6:2.8,3.2:')
+ depends_on('py-nose', type='build')
+ depends_on('py-setuptools', type='build')
depends_on('blas', when='+blas')
depends_on('lapack', when='+lapack')
- def install(self, spec, prefix):
- libraries = []
- library_dirs = []
+ def setup_dependent_package(self, module, dep_spec):
+ python_version = self.spec['python'].version.up_to(2)
+ arch = '{0}-{1}'.format(platform.system().lower(), platform.machine())
- if '+blas' in spec:
- libraries.append('blas')
- library_dirs.append(spec['blas'].prefix.lib)
+ self.spec.include = join_path(
+ self.prefix.lib,
+ 'python{0}'.format(python_version),
+ 'site-packages',
+ 'numpy-{0}-py{1}-{2}.egg'.format(
+ self.spec.version, python_version, arch),
+ 'numpy/core/include')
+
+ def patch(self):
+ spec = self.spec
+ # for build notes see http://www.scipy.org/scipylib/building/linux.html
+ lapackblas = LibraryList('')
if '+lapack' in spec:
- libraries.append('lapack')
- library_dirs.append(spec['lapack'].prefix.lib)
+ lapackblas += spec['lapack'].lapack_libs
+
+ if '+blas' in spec:
+ lapackblas += spec['blas'].blas_libs
if '+blas' in spec or '+lapack' in spec:
with open('site.cfg', 'w') as f:
f.write('[DEFAULT]\n')
- f.write('libraries=%s\n' % ','.join(libraries))
- f.write('library_dirs=%s\n' % ':'.join(library_dirs))
-
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
+ f.write('libraries=%s\n' % ','.join(lapackblas.names))
+ f.write('library_dirs=%s\n' % ':'.join(lapackblas.directories))
+ if not ((platform.system() == "Darwin") and
+ (platform.mac_ver()[0] == '10.12')):
+ f.write('rpath=%s\n' % ':'.join(lapackblas.directories))
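
The patch() hook above generates numpy's site.cfg before the build so that
setup.py links against Spack's BLAS/LAPACK instead of probing the system; the
rpath line is omitted on macOS 10.12, matching the platform check in the code.
For a '+blas +lapack' spec the written file looks roughly like this (library
names and directories depend on the chosen providers; placeholders shown):

    [DEFAULT]
    libraries=lapack,blas
    library_dirs=/path/to/lapack/lib:/path/to/blas/lib
    rpath=/path/to/lapack/lib:/path/to/blas/lib
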
diff --git a/var/spack/repos/builtin/packages/py-openpyxl/package.py b/var/spack/repos/builtin/packages/py-openpyxl/package.py
index 30cb52348f..d396916035 100644
--- a/var/spack/repos/builtin/packages/py-openpyxl/package.py
+++ b/var/spack/repos/builtin/packages/py-openpyxl/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyOpenpyxl(Package):
+
+class PyOpenpyxl(PythonPackage):
"""A Python library to read/write Excel 2007 xlsx/xlsm files"""
homepage = 'http://openpyxl.readthedocs.org/'
@@ -32,10 +33,5 @@ class PyOpenpyxl(Package):
version('2.4.0-a1', 'e5ca6d23ceccb15115d45cdf26e736fc')
- extends('python')
-
- depends_on('py-jdcal')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-jdcal', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-ordereddict/package.py b/var/spack/repos/builtin/packages/py-ordereddict/package.py
new file mode 100644
index 0000000000..b560990f00
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ordereddict/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyOrdereddict(PythonPackage):
+    """A drop-in substitute for Py2.7's new collections.OrderedDict
+    that works in Python 2.4-2.6."""
+
+ homepage = "https://pypi.python.org/pypi/ordereddict"
+ url = "https://pypi.python.org/packages/source/o/ordereddict/ordereddict-1.1.tar.gz"
+
+ version('1.1', 'a0ed854ee442051b249bfad0f638bbec')
diff --git a/var/spack/repos/builtin/packages/py-pandas/package.py b/var/spack/repos/builtin/packages/py-pandas/package.py
index 7f54fc5d76..c0da33054f 100644
--- a/var/spack/repos/builtin/packages/py-pandas/package.py
+++ b/var/spack/repos/builtin/packages/py-pandas/package.py
@@ -23,10 +23,18 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
-class PyPandas(Package):
- """pandas is a Python package providing fast, flexible, and expressive data structures designed to make working with relational or labeled data both easy and intuitive. It aims to be the fundamental high-level building block for doing practical, real world data analysis in Python. Additionally, it has the broader goal of becoming the most powerful and flexible open source data analysis / manipulation tool available in any language."""
+
+class PyPandas(PythonPackage):
+ """pandas is a Python package providing fast, flexible, and expressive
+ data structures designed to make working with relational or
+ labeled data both easy and intuitive. It aims to be the
+ fundamental high-level building block for doing practical, real
+ world data analysis in Python. Additionally, it has the broader
+ goal of becoming the most powerful and flexible open source data
+ analysis / manipulation tool available in any language.
+
+ """
homepage = "http://pandas.pydata.org/"
url = "https://pypi.python.org/packages/source/p/pandas/pandas-0.16.0.tar.gz#md5=bfe311f05dc0c351f8955fbd1e296e73"
@@ -34,13 +42,9 @@ class PyPandas(Package):
version('0.16.1', 'fac4f25748f9610a3e00e765474bdea8')
version('0.18.0', 'f143762cd7a59815e348adf4308d2cf6')
- extends('python')
- depends_on('py-dateutil')
- depends_on('py-numpy')
- depends_on('py-setuptools')
- depends_on('py-pytz')
- depends_on('py-numexpr')
- depends_on('py-bottleneck')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-dateutil', type=('build', 'run'))
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
+ depends_on('py-pytz', type=('build', 'run'))
+ depends_on('py-numexpr', type=('build', 'run'))
+ depends_on('py-bottleneck', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pathlib2/package.py b/var/spack/repos/builtin/packages/py-pathlib2/package.py
new file mode 100644
index 0000000000..8951feff4c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pathlib2/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPathlib2(PythonPackage):
+ """Backport of pathlib from python 3.4"""
+
+ homepage = "https://pypi.python.org/pypi/pathlib2"
+ url = "https://pypi.io/packages/source/p/pathlib2/pathlib2-2.1.0.tar.gz"
+
+ version('2.1.0', '38e4f58b4d69dfcb9edb49a54a8b28d2')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@:3.3.999')
diff --git a/var/spack/repos/builtin/packages/py-pathspec/package.py b/var/spack/repos/builtin/packages/py-pathspec/package.py
new file mode 100644
index 0000000000..e5030abc70
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pathspec/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPathspec(PythonPackage):
+ """pathspec extends the test loading and running features of unittest,
+    """pathspec is a utility library for gitignore-style pattern
+    matching of file paths."""
+ homepage = "https://pypi.python.org/pypi/pathspec"
+
+ version('0.3.4', '2a4af9bf2dee98845d583ec61a00d05d',
+ url='https://pypi.python.org/packages/14/9d/c9d790d373d6f6938d793e9c549b87ad8670b6fa7fc6176485e6ef11c1a4/pathspec-0.3.4.tar.gz')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pbr/package.py b/var/spack/repos/builtin/packages/py-pbr/package.py
index 336ea37144..2a320eb76d 100644
--- a/var/spack/repos/builtin/packages/py-pbr/package.py
+++ b/var/spack/repos/builtin/packages/py-pbr/package.py
@@ -23,20 +23,14 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
-class PyPbr(Package):
- """PBR is a library that injects some useful and sensible default behaviors into your setuptools run."""
+
+class PyPbr(PythonPackage):
+ """PBR is a library that injects some useful and sensible default
+ behaviors into your setuptools run."""
homepage = "https://pypi.python.org/pypi/pbr"
url = "https://pypi.python.org/packages/source/p/pbr/pbr-1.8.1.tar.gz"
version('1.8.1', 'c8f9285e1a4ca6f9654c529b158baa3a')
- extends('python')
-
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
-
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-periodictable/package.py b/var/spack/repos/builtin/packages/py-periodictable/package.py
index 7d8a635979..9ff1543018 100644
--- a/var/spack/repos/builtin/packages/py-periodictable/package.py
+++ b/var/spack/repos/builtin/packages/py-periodictable/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyPeriodictable(Package):
+
+class PyPeriodictable(PythonPackage):
"""nose extends the test loading and running features of unittest,
making it easier to write, find and run tests."""
@@ -33,9 +34,5 @@ class PyPeriodictable(Package):
version('1.4.1', '7246b63cc0b6b1be6e86b6616f9e866e')
- depends_on('py-numpy')
- depends_on('py-pyparsing')
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-pyparsing', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pexpect/package.py b/var/spack/repos/builtin/packages/py-pexpect/package.py
index dd95af9643..5c194c44b6 100644
--- a/var/spack/repos/builtin/packages/py-pexpect/package.py
+++ b/var/spack/repos/builtin/packages/py-pexpect/package.py
@@ -24,14 +24,13 @@
##############################################################################
from spack import *
-class PyPexpect(Package):
+
+class PyPexpect(PythonPackage):
"""Pexpect allows easy control of interactive console applications."""
homepage = "https://pypi.python.org/pypi/pexpect"
- url = "https://pypi.python.org/packages/source/p/pexpect/pexpect-3.3.tar.gz"
+ url = "https://pypi.io/packages/source/p/pexpect/pexpect-4.2.1.tar.gz"
+ version('4.2.1', '3694410001a99dff83f0b500a1ca1c95')
version('3.3', '0de72541d3f1374b795472fed841dce8')
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-ptyprocess', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-phonopy/package.py b/var/spack/repos/builtin/packages/py-phonopy/package.py
index 68774c90c2..b7f1003e28 100644
--- a/var/spack/repos/builtin/packages/py-phonopy/package.py
+++ b/var/spack/repos/builtin/packages/py-phonopy/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyPhonopy(Package):
+
+class PyPhonopy(PythonPackage):
"""Phonopy is an open source package for phonon
calculations at harmonic and quasi-harmonic levels."""
homepage = "http://atztogo.github.io/phonopy/index.html"
@@ -32,11 +33,7 @@ class PyPhonopy(Package):
version('1.10.0', '973ed1bcea46e21b9bf747aab9061ff6')
- extends('python')
- depends_on('py-numpy')
- depends_on('py-scipy')
- depends_on('py-matplotlib')
- depends_on('py-pyyaml')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--home=%s' % prefix)
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-pyyaml', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pickleshare/package.py b/var/spack/repos/builtin/packages/py-pickleshare/package.py
new file mode 100644
index 0000000000..9bf9ff63fb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pickleshare/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPickleshare(PythonPackage):
+ """Tiny 'shelve'-like database with concurrency support"""
+
+ homepage = "https://pypi.python.org/pypi/pickleshare"
+ url = "https://pypi.io/packages/source/p/pickleshare/pickleshare-0.7.4.tar.gz"
+
+ version('0.7.4', '6a9e5dd8dfc023031f6b7b3f824cab12')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pil/package.py b/var/spack/repos/builtin/packages/py-pil/package.py
index dc4418fdfe..fb14fb9b27 100644
--- a/var/spack/repos/builtin/packages/py-pil/package.py
+++ b/var/spack/repos/builtin/packages/py-pil/package.py
@@ -24,15 +24,19 @@
##############################################################################
from spack import *
-class PyPil(Package):
- """The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. This library supports many file formats, and provides powerful image processing and graphics capabilities."""
+
+class PyPil(PythonPackage):
+ """The Python Imaging Library (PIL) adds image processing capabilities
+ to your Python interpreter. This library supports many file formats,
+ and provides powerful image processing and graphics capabilities."""
homepage = "http://www.pythonware.com/products/pil/"
url = "http://effbot.org/media/downloads/Imaging-1.1.7.tar.gz"
version('1.1.7', 'fc14a54e1ce02a0225be8854bfba478e')
- extends('python')
+ provides('pil')
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ # py-pil currently only works with Python2.
+ # If you are using Python 3, try using py-pillow instead.
+ depends_on('python@1.5.2:2.8')
diff --git a/var/spack/repos/builtin/packages/py-pillow/package.py b/var/spack/repos/builtin/packages/py-pillow/package.py
index cdbc57e0a5..5729c5c3dd 100644
--- a/var/spack/repos/builtin/packages/py-pillow/package.py
+++ b/var/spack/repos/builtin/packages/py-pillow/package.py
@@ -23,77 +23,110 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+import sys
-class PyPillow(Package):
- """Pillow is the friendly PIL fork by Alex Clark and Contributors. PIL is the Python Imaging Library by Fredrik Lundh and Contributors. The Python Imaging Library (PIL) adds image processing capabilities to your Python interpreter. This library supports many file formats, and provides powerful image processing and graphics capabilities."""
- homepage = "https://python-pillow.github.io/"
- url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-3.0.0.tar.gz"
+class PyPillow(PythonPackage):
+ """Pillow is a fork of the Python Imaging Library (PIL). It adds image
+ processing capabilities to your Python interpreter. This library supports
+ many file formats, and provides powerful image processing and graphics
+ capabilities."""
+ homepage = "https://python-pillow.org/"
+ url = "https://pypi.python.org/packages/source/P/Pillow/Pillow-3.0.0.tar.gz"
+
+ # TODO: This version should be deleted once the next release comes out.
+ # TODO: It fixes a bug that prevented us from linking to Tk/Tcl.
+ # TODO: Tk/Tcl support is necessary for tkinter bitmap and photo images.
+ # TODO: If you require this support, run the following command:
+ # TODO: `spack install py-pillow@3.3.0.dev0 ^python+tk`
+ version('3.3.0.dev0', git='https://github.com/python-pillow/Pillow.git',
+ commit='30eced62868141a6c859a4370efd40b9434a7c3f')
+
+ version('3.2.0', '7cfd093c11205d9e2ebe3c51dfcad510', preferred=True)
version('3.0.0', 'fc8ac44e93da09678eac7e30c9b7377d')
- provides('PIL')
+
+ provides('pil')
# These defaults correspond to Pillow defaults
variant('jpeg', default=True, description='Provide JPEG functionality')
variant('zlib', default=True, description='Access to compressed PNGs')
variant('tiff', default=False, description='Access to TIFF files')
variant('freetype', default=False, description='Font related services')
- variant('tk', default=False, description='Support for tkinter bitmap and photo images')
variant('lcms', default=False, description='Color management')
+ variant('jpeg2000', default=False,
+ description='Provide JPEG 2000 functionality')
# Spack does not (yet) support these modes of building
- # variant('webp', default=False, description='')
- # variant('webpmux', default=False, description='')
- # variant('jpeg2000', default=False, description='')
+ # variant('webp', default=False, description='Provide the WebP format')
+ # variant('webpmux', default=False,
+ # description='WebP metadata, relies on WebP support')
+ # variant('imagequant', default=False,
+ # description='Provide improved color quantization')
- extends('python')
- depends_on('binutils')
- depends_on('py-setuptools')
+ # Required dependencies
+ depends_on('binutils', type='build', when=sys.platform != 'darwin')
+ depends_on('py-setuptools', type='build')
- depends_on('jpeg', when='+jpeg') # BUG: It will use the system libjpeg anyway
+ # Recommended dependencies
+ depends_on('jpeg', when='+jpeg')
depends_on('zlib', when='+zlib')
- depends_on('tiff', when='+tiff')
+
+ # Optional dependencies
+ depends_on('libtiff', when='+tiff')
depends_on('freetype', when='+freetype')
depends_on('lcms', when='+lcms')
- depends_on('tcl', when='+tk')
- depends_on('tk', when='+tk')
+ depends_on('openjpeg', when='+jpeg2000')
+
+ # Spack does not (yet) support these modes of building
+ # depends_on('webp', when='+webp')
+ # depends_on('webpmux', when='+webpmux')
+ # depends_on('imagequant', when='+imagequant')
+
+ phases = ['build_ext', 'install']
+
+ def patch(self):
+ """Patch setup.py to provide lib and include directories
+ for dependencies."""
- def install(self, spec, prefix):
- libpath=[]
+ spec = self.spec
+ setup = FileFilter('setup.py')
if '+jpeg' in spec:
- libpath.append(join_path(spec['jpeg'].prefix, 'lib'))
+ setup.filter('JPEG_ROOT = None',
+ 'JPEG_ROOT = ("{0}", "{1}")'.format(
+ spec['jpeg'].prefix.lib,
+ spec['jpeg'].prefix.include))
if '+zlib' in spec:
- libpath.append(join_path(spec['zlib'].prefix, 'lib'))
+ setup.filter('ZLIB_ROOT = None',
+ 'ZLIB_ROOT = ("{0}", "{1}")'.format(
+ spec['zlib'].prefix.lib,
+ spec['zlib'].prefix.include))
if '+tiff' in spec:
- libpath.append(join_path(spec['tiff'].prefix, 'lib'))
+ setup.filter('TIFF_ROOT = None',
+ 'TIFF_ROOT = ("{0}", "{1}")'.format(
+ spec['libtiff'].prefix.lib,
+ spec['libtiff'].prefix.include))
if '+freetype' in spec:
- libpath.append(join_path(spec['freetype'].prefix, 'lib'))
+ setup.filter('FREETYPE_ROOT = None',
+ 'FREETYPE_ROOT = ("{0}", "{1}")'.format(
+ spec['freetype'].prefix.lib,
+ spec['freetype'].prefix.include))
if '+lcms' in spec:
- libpath.append(join_path(spec['lcms'].prefix, 'lib'))
-
- # This has not been tested, and likely needs some other treatment.
- #if '+tk' in spec:
- # libpath.append(join_path(spec['tcl'].prefix, 'lib'))
- # libpath.append(join_path(spec['tk'].prefix, 'lib'))
-
- # -------- Building
- cmd = ['build_ext',
- '--%s-jpeg' % ('enable' if '+jpeg' in spec else 'disable'),
- '--%s-zlib' % ('enable' if '+zlib' in spec else 'disable'),
- '--%s-tiff' % ('enable' if '+tiff' in spec else 'disable'),
- '--%s-freetype' % ('enable' if '+freetype' in spec else 'disable'),
- '--%s-lcms' % ('enable' if '+lcms' in spec else 'disable'),
- '-L'+':'.join(libpath) # NOTE: This does not make it find libjpeg
- ]
-
- #if '+tk' in spec:
- # cmd.extend(['--enable-tcl', '--enable-tk'])
- #else:
- # cmd.extend(['--disable-tcl', '--disable-tk'])
-
- # --------- Installation
- cmd.extend(['install', '--prefix=%s' % prefix])
-
- python('setup.py', *cmd)
+ setup.filter('LCMS_ROOT = None',
+ 'LCMS_ROOT = ("{0}", "{1}")'.format(
+ spec['lcms'].prefix.lib,
+ spec['lcms'].prefix.include))
+ if '+jpeg2000' in spec:
+ setup.filter('JPEG2K_ROOT = None',
+ 'JPEG2K_ROOT = ("{0}", "{1}")'.format(
+ spec['openjpeg'].prefix.lib,
+ spec['openjpeg'].prefix.include))
+
+ def build_ext_args(self, spec, prefix):
+ def variant_to_flag(variant):
+ able = 'enable' if '+{0}'.format(variant) in spec else 'disable'
+ return '--{0}-{1}'.format(able, variant)
+
+ variants = ['jpeg', 'zlib', 'tiff', 'freetype', 'lcms', 'jpeg2000']
+ return list(map(variant_to_flag, variants))
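The build_ext_args hook above turns each Pillow feature variant into an --enable-<feature> or --disable-<feature> argument for setup.py build_ext, while patch() points setup.py's *_ROOT variables at the Spack-provided lib/include directories. A standalone sketch of the flag mapping, outside Spack, with a made-up set of enabled variants:

    def variant_to_flag(variant, enabled):
        """Return --enable-<variant> if the variant is on, else --disable-<variant>."""
        able = 'enable' if variant in enabled else 'disable'
        return '--{0}-{1}'.format(able, variant)

    if __name__ == '__main__':
        enabled = {'jpeg', 'zlib'}  # e.g. a spec like py-pillow+jpeg+zlib
        variants = ('jpeg', 'zlib', 'tiff', 'freetype', 'lcms', 'jpeg2000')
        print([variant_to_flag(v, enabled) for v in variants])
        # ['--enable-jpeg', '--enable-zlib', '--disable-tiff',
        #  '--disable-freetype', '--disable-lcms', '--disable-jpeg2000']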
diff --git a/var/spack/repos/builtin/packages/py-pip/package.py b/var/spack/repos/builtin/packages/py-pip/package.py
new file mode 100644
index 0000000000..73e1661245
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pip/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPip(PythonPackage):
+ """The PyPA recommended tool for installing Python packages."""
+
+ homepage = "https://pypi.python.org/pypi/pip"
+ url = "https://pypi.io/packages/source/p/pip/pip-9.0.1.tar.gz"
+
+ version('9.0.1', '35f01da33009719497f01a4ba69d63c9')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-ply/package.py b/var/spack/repos/builtin/packages/py-ply/package.py
new file mode 100644
index 0000000000..f5a1e537e2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ply/package.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPly(PythonPackage):
+ """PLY is nothing more than a straightforward lex/yacc implementation."""
+ homepage = "http://www.dabeaz.com/ply"
+ url = "http://www.dabeaz.com/ply/ply-3.8.tar.gz"
+
+ version('3.8', '94726411496c52c87c2b9429b12d5c50')
diff --git a/var/spack/repos/builtin/packages/py-pmw/package.py b/var/spack/repos/builtin/packages/py-pmw/package.py
index 062bfe9c03..3293d94cd6 100644
--- a/var/spack/repos/builtin/packages/py-pmw/package.py
+++ b/var/spack/repos/builtin/packages/py-pmw/package.py
@@ -24,14 +24,11 @@
##############################################################################
from spack import *
-class PyPmw(Package):
- """Pmw is a toolkit for building high-level compound widgets, or megawidgets, constructed using other widgets as component parts."""
+
+class PyPmw(PythonPackage):
+ """Pmw is a toolkit for building high-level compound widgets, or
+ megawidgets, constructed using other widgets as component parts."""
homepage = "https://pypi.python.org/pypi/Pmw"
url = "https://pypi.python.org/packages/source/P/Pmw/Pmw-2.0.0.tar.gz"
version('2.0.0', 'c7c3f26c4f5abaa99807edefee578fc0')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-prettytable/package.py b/var/spack/repos/builtin/packages/py-prettytable/package.py
index e3edc7b706..2203f68af0 100644
--- a/var/spack/repos/builtin/packages/py-prettytable/package.py
+++ b/var/spack/repos/builtin/packages/py-prettytable/package.py
@@ -22,23 +22,18 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyPrettytable(Package):
- """
- PrettyTable is a simple Python library designed to make
+class PyPrettytable(PythonPackage):
+ """PrettyTable is a simple Python library designed to make
it quick and easy to represent tabular data in visually
- appealing ASCII tables
+ appealing ASCII tables.
"""
+
homepage = "https://code.google.com/archive/p/prettytable/"
url = "https://pypi.python.org/packages/e0/a1/36203205f77ccf98f3c6cf17cf068c972e6458d7e58509ca66da949ca347/prettytable-0.7.2.tar.gz"
version('0.7.2', 'a6b80afeef286ce66733d54a0296b13b')
- extends("python")
- depends_on("py-setuptools")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on("py-setuptools", type='build')
diff --git a/var/spack/repos/builtin/packages/py-proj/package.py b/var/spack/repos/builtin/packages/py-proj/package.py
new file mode 100644
index 0000000000..949aab88c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-proj/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyProj(PythonPackage):
+ """Python interface to the PROJ.4 Library."""
+ homepage = "http://jswhit.github.io/pyproj/"
+ url = "https://github.com/jswhit/pyproj/tarball/v1.9.5.1rel"
+
+ # This is not a tagged release of pyproj.
+ # The changes in this "version" fix some bugs, especially with Python3 use.
+ version('1.9.5.1.1', 'd035e4bc704d136db79b43ab371b27d2',
+ url='https://www.github.com/jswhit/pyproj/tarball/0be612cc9f972e38b50a90c946a9b353e2ab140f')
+
+ version('1.9.5.1', 'a4b80d7170fc82aee363d7f980279835')
+
+ depends_on('py-cython', type='build')
+ depends_on('py-setuptools', type='build')
+
+ # NOTE: py-proj does NOT depends_on('proj').
+ # The py-proj git repo actually includes the correct version of PROJ.4,
+ # which is built internally as part of the py-proj build.
+ # Adding depends_on('proj') will cause mysterious build errors.
diff --git a/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py b/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py
new file mode 100644
index 0000000000..da48cb932f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-prompt-toolkit/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPromptToolkit(PythonPackage):
+ """Library for building powerful interactive command lines in Python"""
+
+ homepage = "https://pypi.python.org/pypi/prompt_toolkit"
+ url = "https://pypi.io/packages/source/p/prompt_toolkit/prompt_toolkit-1.0.9.tar.gz"
+
+ version('1.0.9', 'a39f91a54308fb7446b1a421c11f227c')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six@1.9.0:', type=('build', 'run'))
+ depends_on('py-wcwidth', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-protobuf/package.py b/var/spack/repos/builtin/packages/py-protobuf/package.py
new file mode 100644
index 0000000000..4cdb3801a5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-protobuf/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyProtobuf(PythonPackage):
+ """Protocol buffers are Google's language-neutral, platform-neutral,
+ extensible mechanism for serializing structured data - think XML, but
+ smaller, faster, and simpler. You define how you want your data to be
+ structured once, then you can use special generated source code to easily
+ write and read your structured data to and from a variety of data streams
+ and using a variety of languages."""
+
+ homepage = 'https://developers.google.com/protocol-buffers/'
+ url = 'https://pypi.python.org/packages/source/p/protobuf/protobuf-3.0.0b2.tar.gz'
+
+ version('3.0.0b2', 'f0d3bd2394345a9af4a277cd0302ae83')
+ version('2.6.1', '6bf843912193f70073db7f22e2ea55e2')
+ version('2.5.0', '338813f3629d59e9579fed9035ecd457')
+ version('2.4.1', '72f5141d20ab1bcae6b1e00acfb1068a')
+ version('2.3.0', 'bb020c962f252fe81bfda8fb433bafdd')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-psutil/package.py b/var/spack/repos/builtin/packages/py-psutil/package.py
new file mode 100644
index 0000000000..7aa776d2a1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-psutil/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPsutil(Package):
+ """psutil is a cross-platform library for retrieving information on
+ running processes and system utilization (CPU, memory, disks, network)
+ in Python."""
+
+ homepage = "https://pypi.python.org/pypi/psutil"
+ url = "https://pypi.python.org/packages/d9/c8/8c7a2ab8ec108ba9ab9a4762c5a0d67c283d41b13b5ce46be81fdcae3656/psutil-5.0.1.tar.gz"
+
+ version('5.0.1', '153dc8be94badc4072016ceeac7808dc')
+
+ extends('python')
+ depends_on('python@2.6:')
+ depends_on('py-setuptools', type='build')
+
+ def install(self, spec, prefix):
+ setup_py('install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-ptyprocess/package.py b/var/spack/repos/builtin/packages/py-ptyprocess/package.py
new file mode 100644
index 0000000000..2419793227
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-ptyprocess/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPtyprocess(PythonPackage):
+ """Run a subprocess in a pseudo terminal"""
+
+ homepage = "https://pypi.python.org/pypi/ptyprocess"
+ url = "https://pypi.io/packages/source/p/ptyprocess/ptyprocess-0.5.1.tar.gz"
+
+ version('0.5.1', '94e537122914cc9ec9c1eadcd36e73a1')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pudb/package.py b/var/spack/repos/builtin/packages/py-pudb/package.py
new file mode 100644
index 0000000000..66896bd83f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pudb/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class PyPudb(PythonPackage):
+ """Full-screen console debugger for Python"""
+
+ homepage = "http://mathema.tician.de/software/pudb"
+ url = "https://pypi.io/packages/source/p/pudb/pudb-2016.2.tar.gz"
+
+ version('2016.2', '4573b70163329c1cb59836a357bfdf7c')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-urwid@1.1.1:', type=('build', 'run'))
+ depends_on('py-pygments@1.0:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-py/package.py b/var/spack/repos/builtin/packages/py-py/package.py
new file mode 100644
index 0000000000..5a963105bd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-py/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPy(PythonPackage):
+ """library with cross-python path, ini-parsing, io, code, log facilities"""
+
+ homepage = "http://pylib.readthedocs.io/en/latest/"
+ url = "https://pypi.python.org/packages/source/p/py/py-1.4.31.tar.gz"
+
+ version('1.4.31', '5d2c63c56dc3f2115ec35c066ecd582b')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-py2cairo/package.py b/var/spack/repos/builtin/packages/py-py2cairo/package.py
new file mode 100644
index 0000000000..bb404c61f0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-py2cairo/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPy2cairo(Package):
+ """Pycairo is a set of Python bindings for the cairo graphics library."""
+
+ homepage = "https://www.cairographics.org/pycairo/"
+ url = "https://cairographics.org/releases/py2cairo-1.10.0.tar.bz2"
+
+ version('1.10.0', '20337132c4ab06c1146ad384d55372c5')
+
+ extends('python')
+
+ depends_on('cairo+X')
+ depends_on('pixman')
+
+ def install(self, spec, prefix):
+ python('waf', 'configure', '--prefix={0}'.format(prefix))
+ python('waf', 'build')
+ python('waf', 'install')
diff --git a/var/spack/repos/builtin/packages/py-py2neo/package.py b/var/spack/repos/builtin/packages/py-py2neo/package.py
index 9c6e219264..1db080ac97 100644
--- a/var/spack/repos/builtin/packages/py-py2neo/package.py
+++ b/var/spack/repos/builtin/packages/py-py2neo/package.py
@@ -22,14 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyPy2neo(Package):
- """FIXME: put a proper description of your package here."""
- # FIXME: add a proper url for your package's homepage here.
- homepage = "http://www.example.com"
+class PyPy2neo(PythonPackage):
+ """Py2neo is a client library and toolkit for working with Neo4j from
+ within Python applications and from the command line."""
+
+ homepage = "http://py2neo.org/"
url = "https://github.com/nigelsmall/py2neo/archive/py2neo-2.0.8.tar.gz"
version('2.0.8', 'e3ec5172a9e006515ef4155688a05a55')
@@ -38,9 +38,4 @@ class PyPy2neo(Package):
version('2.0.5', '143b1f9c0aa22faf170c1b9f84c7343b')
version('2.0.4', 'b3f7efd3344dc3f66db4eda11e5899f7')
- depends_on("py-setuptools")
- extends("python")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
+ depends_on("py-setuptools", type='build')
diff --git a/var/spack/repos/builtin/packages/py-pychecker/package.py b/var/spack/repos/builtin/packages/py-pychecker/package.py
index 137c5f491c..de09b380c9 100644
--- a/var/spack/repos/builtin/packages/py-pychecker/package.py
+++ b/var/spack/repos/builtin/packages/py-pychecker/package.py
@@ -24,14 +24,10 @@
##############################################################################
from spack import *
-class PyPychecker(Package):
- """"""
+
+class PyPychecker(PythonPackage):
+ """Python source code checking tool."""
homepage = "http://pychecker.sourceforge.net/"
url = "http://sourceforge.net/projects/pychecker/files/pychecker/0.8.19/pychecker-0.8.19.tar.gz"
version('0.8.19', 'c37182863dfb09209d6ba4f38fce9d2b')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-pycodestyle/package.py b/var/spack/repos/builtin/packages/py-pycodestyle/package.py
new file mode 100644
index 0000000000..3e668a2704
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pycodestyle/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPycodestyle(PythonPackage):
+ """pycodestyle is a tool to check your Python code against some of the
+ style conventions in PEP 8. Note: formerly called pep8."""
+
+ homepage = "https://github.com/PyCQA/pycodestyle"
+ url = "https://github.com/PyCQA/pycodestyle/archive/2.0.0.tar.gz"
+
+ version('2.0.0', '5c3e90001f538bf3b7896d60e92eb6f6')
+ version('1.7.0', '31070a3a6391928893cbf5fa523eb8d9')
+ version('1.6.2', '8df18246d82ddd3d19ffe7518f983955')
+ version('1.6.1', '9d59bdc7c60f46f7cee86c732e28aa1a')
+ version('1.6', '340fa7e39bb44fb08db6eddf7cdc880a')
+ version('1.5.7', '6d0f5fc7d95755999bc9275cad5cbf3e')
+ version('1.5.6', 'c5c30e3d267b48bf3dfe7568e803a813')
+ version('1.5.5', 'cfa12df9b86b3a1dfb13aced1927e12f')
+ version('1.5.4', '3977a760829652543544074c684610ee')
+
+ # Most Python packages only require py-setuptools as a build dependency.
+ # However, py-pycodestyle requires py-setuptools during runtime as well.
+ depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pycparser/package.py b/var/spack/repos/builtin/packages/py-pycparser/package.py
index c33769c526..8de5c39d32 100644
--- a/var/spack/repos/builtin/packages/py-pycparser/package.py
+++ b/var/spack/repos/builtin/packages/py-pycparser/package.py
@@ -24,16 +24,12 @@
##############################################################################
from spack import *
-class PyPycparser(Package):
- """pycparser is a complete parser of the C language, written in pure python"""
+
+class PyPycparser(PythonPackage):
+ """A complete parser of the C language, written in pure python."""
homepage = "https://github.com/eliben/pycparser"
url = "https://pypi.python.org/packages/source/p/pycparser/pycparser-2.13.tar.gz"
version('2.13', 'e4fe1a2d341b22e25da0d22f034ef32f')
-
- extends('python')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pycurl/package.py b/var/spack/repos/builtin/packages/py-pycurl/package.py
new file mode 100644
index 0000000000..81a2a35064
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pycurl/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPycurl(PythonPackage):
+ """PycURL is a Python interface to libcurl. PycURL can be used to fetch
+ objects identified by a URL from a Python program."""
+
+ homepage = "http://pycurl.io/"
+ url = "https://pypi.python.org/packages/source/p/pycurl/pycurl-7.43.0.tar.gz"
+
+ version('7.43.0', 'c94bdba01da6004fa38325e9bd6b9760')
+
+ depends_on('python@2.6:')
+ depends_on('py-setuptools', type='build')
+ depends_on('curl@7.19.0:')
diff --git a/var/spack/repos/builtin/packages/py-pydatalog/package.py b/var/spack/repos/builtin/packages/py-pydatalog/package.py
new file mode 100644
index 0000000000..600a34cdfc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pydatalog/package.py
@@ -0,0 +1,33 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPydatalog(PythonPackage):
+ """pyDatalog adds logic programming to Python."""
+ homepage = 'https://pypi.python.org/pypi/pyDatalog/'
+ url = 'https://pypi.python.org/packages/09/0b/2670eb9c0027aacfb5b5024ca75e5fee2f1261180ab8797108ffc941158a/pyDatalog-0.17.1.zip'
+
+ version('0.17.1', '6b2682301200068d208d6f2d01723939')
diff --git a/var/spack/repos/builtin/packages/py-pyelftools/package.py b/var/spack/repos/builtin/packages/py-pyelftools/package.py
index 0c4a7a36cc..d586f14f0d 100644
--- a/var/spack/repos/builtin/packages/py-pyelftools/package.py
+++ b/var/spack/repos/builtin/packages/py-pyelftools/package.py
@@ -24,14 +24,11 @@
##############################################################################
from spack import *
-class PyPyelftools(Package):
- """A pure-Python library for parsing and analyzing ELF files and DWARF debugging information"""
+
+class PyPyelftools(PythonPackage):
+ """A pure-Python library for parsing and analyzing ELF files and DWARF
+ debugging information"""
homepage = "https://pypi.python.org/pypi/pyelftools"
url = "https://pypi.python.org/packages/source/p/pyelftools/pyelftools-0.23.tar.gz"
version('0.23', 'aa7cefa8bd2f63d7b017440c9084f310')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-pyflakes/package.py b/var/spack/repos/builtin/packages/py-pyflakes/package.py
new file mode 100644
index 0000000000..53674bb6e4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pyflakes/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPyflakes(PythonPackage):
+ """A simple program which checks Python source files for errors."""
+
+ homepage = "https://github.com/PyCQA/pyflakes"
+ url = "https://github.com/PyCQA/pyflakes/archive/1.3.0.tar.gz"
+
+ version('1.3.0', 'a76173deb7a84fe860c0b60e2fbcdfe2')
+ version('1.2.3', '2ac2e148a5c46b6bb06c4785be76f7cc')
+ version('1.2.2', 'fe759b9381a6500e67a2ddbbeb5161a4')
+ version('1.2.1', '444a06b256e0a70e41c11698b7190e84')
+ version('1.2.0', '5d1c87bf09696c4c35dc3103f2a1185c')
+ version('1.1.0', '4e18bf78c0455ebcd41e5d6104392c88')
+ version('1.0.0', 'e2ea22a825c5100f12e54b71771cde71')
+ version('0.9.2', 'd02d5f68e944085fd6ec163a34737a96')
+ version('0.9.1', '8108d2248e93ca6a315fa2dd31ee9bb1')
+ version('0.9.0', '43c2bcee88606bde55dbf25a253ef886')
+
+ # Most Python packages only require py-setuptools as a build dependency.
+ # However, py-pyflakes requires py-setuptools during runtime as well.
+ depends_on('py-setuptools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pygments/package.py b/var/spack/repos/builtin/packages/py-pygments/package.py
index fedf3d068d..42e3366cdf 100644
--- a/var/spack/repos/builtin/packages/py-pygments/package.py
+++ b/var/spack/repos/builtin/packages/py-pygments/package.py
@@ -24,16 +24,15 @@
##############################################################################
from spack import *
-class PyPygments(Package):
+
+class PyPygments(PythonPackage):
"""Pygments is a syntax highlighting package written in Python."""
+
homepage = "https://pypi.python.org/pypi/pygments"
url = "https://pypi.python.org/packages/source/P/Pygments/Pygments-2.0.1.tar.gz"
+ version('2.1.3', 'ed3fba2467c8afcda4d317e4ef2c6150')
version('2.0.1', 'e0daf4c14a4fe5b630da765904de4d6c')
version('2.0.2', '238587a1370d62405edabd0794b3ec4a')
- extends('python')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pygobject/package.py b/var/spack/repos/builtin/packages/py-pygobject/package.py
new file mode 100644
index 0000000000..1dae1a128b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pygobject/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPygobject(AutotoolsPackage):
+ """bindings for the GLib, and GObject,
+ to be used in Python."""
+
+ homepage = "https://pypi.python.org/pypi/pygobject"
+
+ # FIXME: This URL is no longer available for download from PyPi
+ url = "https://pypi.python.org/packages/6d/15/97c8b5ccca2be14cf59a2f79e15e3a82a1c3408a6b76b4107689a8b94846/pygobject-2.28.3.tar.bz2"
+
+ version('2.28.3', 'aa64900b274c4661a5c32e52922977f9')
+
+ extends('python')
+ depends_on("libffi")
+ depends_on('glib')
+ depends_on('py-py2cairo')
+ depends_on('gobject-introspection')
+
+ patch('pygobject-2.28.6-introspection-1.patch')
+
+ def install(self, spec, prefix):
+ make('install', parallel=False)
diff --git a/var/spack/repos/builtin/packages/py-pygobject/pygobject-2.28.6-introspection-1.patch b/var/spack/repos/builtin/packages/py-pygobject/pygobject-2.28.6-introspection-1.patch
new file mode 100644
index 0000000000..ef96ba3352
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pygobject/pygobject-2.28.6-introspection-1.patch
@@ -0,0 +1,35 @@
+Submitted By: Andrew Benton <andy@benton.eu.com>
+Date: 2012-03-29
+Initial Package Version: 2.28.6
+Upstream Status: not submitted
+Origin: me
+Description: Fixes compiling with a recent version of gobject-introspection
+
+--- pygobject-2.28.6/gi/pygi-info.c-orig 2012-03-29 02:27:37.494228732 +0100
++++ pygobject-2.28.6/gi/pygi-info.c 2012-03-29 02:26:37.735132310 +0100
+@@ -162,9 +162,6 @@
+ case GI_INFO_TYPE_CONSTANT:
+ type = &PyGIConstantInfo_Type;
+ break;
+- case GI_INFO_TYPE_ERROR_DOMAIN:
+- type = &PyGIErrorDomainInfo_Type;
+- break;
+ case GI_INFO_TYPE_UNION:
+ type = &PyGIUnionInfo_Type;
+ break;
+@@ -481,7 +478,6 @@
+ case GI_INFO_TYPE_INVALID:
+ case GI_INFO_TYPE_FUNCTION:
+ case GI_INFO_TYPE_CONSTANT:
+- case GI_INFO_TYPE_ERROR_DOMAIN:
+ case GI_INFO_TYPE_VALUE:
+ case GI_INFO_TYPE_SIGNAL:
+ case GI_INFO_TYPE_PROPERTY:
+@@ -860,7 +856,6 @@
+ case GI_INFO_TYPE_INVALID:
+ case GI_INFO_TYPE_FUNCTION:
+ case GI_INFO_TYPE_CONSTANT:
+- case GI_INFO_TYPE_ERROR_DOMAIN:
+ case GI_INFO_TYPE_VALUE:
+ case GI_INFO_TYPE_SIGNAL:
+ case GI_INFO_TYPE_PROPERTY:
diff --git a/var/spack/repos/builtin/packages/py-pygtk/package.py b/var/spack/repos/builtin/packages/py-pygtk/package.py
new file mode 100644
index 0000000000..56e0b39fd5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pygtk/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPygtk(AutotoolsPackage):
+ """bindings for the Gtk in Python"""
+ homepage = "http://www.pygtk.org/"
+ url = "http://ftp.gnome.org/pub/GNOME/sources/pygtk/2.24/pygtk-2.24.0.tar.gz"
+
+ version('2.24.0', 'd27c7f245a9e027f6b6cd9acb7468e36')
+
+ extends('python')
+ depends_on("libffi")
+ depends_on('cairo')
+ depends_on('glib')
+ depends_on('gtkplus')
+ depends_on('py-pygobject')
+ depends_on('py-py2cairo')
+
+ def install(self, spec, prefix):
+ make('install', parallel=False)
diff --git a/var/spack/repos/builtin/packages/py-pylint/package.py b/var/spack/repos/builtin/packages/py-pylint/package.py
index 66118e8228..84830c9bcf 100644
--- a/var/spack/repos/builtin/packages/py-pylint/package.py
+++ b/var/spack/repos/builtin/packages/py-pylint/package.py
@@ -23,9 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import re
-class PyPylint(Package):
+
+class PyPylint(PythonPackage):
"""array processing for numbers, strings, records, and objects."""
homepage = "https://pypi.python.org/pypi/pylint"
url = "https://pypi.python.org/packages/source/p/pylint/pylint-1.4.1.tar.gz"
@@ -33,9 +33,9 @@ class PyPylint(Package):
version('1.4.1', 'df7c679bdcce5019389038847e4de622')
version('1.4.3', '5924c1c7ca5ca23647812f5971d0ea44')
- extends('python')
- depends_on('py-nose')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ extends('python', ignore=r'bin/pytest')
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-astroid', type=('build', 'run'))
+ depends_on('py-logilab-common', type=('build', 'run'))
+ depends_on('py-nose', type='build')
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pypar/package.py b/var/spack/repos/builtin/packages/py-pypar/package.py
index f762789dea..f10b6d807f 100644
--- a/var/spack/repos/builtin/packages/py-pypar/package.py
+++ b/var/spack/repos/builtin/packages/py-pypar/package.py
@@ -24,15 +24,21 @@
##############################################################################
from spack import *
-class PyPypar(Package):
- """Pypar is an efficient but easy-to-use module that allows programs written in Python to run in parallel on multiple processors and communicate using MPI."""
+
+class PyPypar(PythonPackage):
+ """Pypar is an efficient but easy-to-use module that allows programs
+ written in Python to run in parallel on multiple processors and
+ communicate using MPI."""
homepage = "http://code.google.com/p/pypar/"
- url = "https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz"
+ url = "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/pypar/pypar-2.1.5_108.tgz"
+
+ version('2.1.5_108', '7a1f28327d2a3b679f9455c843d850b8')
- version('2.1.5_108', '7a1f28327d2a3b679f9455c843d850b8', url='https://pypar.googlecode.com/files/pypar-2.1.5_108.tgz')
- extends('python')
depends_on('mpi')
+ depends_on('py-numpy', type=('build', 'run'))
+
+ def url_for_version(self, version):
+ return "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/pypar/pypar-%s.tgz" % version
- def install(self, spec, prefix):
- with working_dir('source'):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ def build_directory(self):
+ return 'source'
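url_for_version in the py-pypar hunk above rebuilds the download URL from the version string, now that the tarballs live in the Google Code archive; the same expression as a standalone function:

    def url_for_version(version):
        # Mirrors the format string used in the package above.
        return ("https://storage.googleapis.com/google-code-archive-downloads/"
                "v2/code.google.com/pypar/pypar-%s.tgz" % version)

    print(url_for_version('2.1.5_108'))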
diff --git a/var/spack/repos/builtin/packages/py-pyparsing/package.py b/var/spack/repos/builtin/packages/py-pyparsing/package.py
index 0423aa3bdb..cb158a76d6 100644
--- a/var/spack/repos/builtin/packages/py-pyparsing/package.py
+++ b/var/spack/repos/builtin/packages/py-pyparsing/package.py
@@ -24,14 +24,10 @@
##############################################################################
from spack import *
-class PyPyparsing(Package):
+
+class PyPyparsing(PythonPackage):
"""A Python Parsing Module."""
homepage = "https://pypi.python.org/pypi/pyparsing"
url = "https://pypi.python.org/packages/source/p/pyparsing/pyparsing-2.0.3.tar.gz"
version('2.0.3', '0fe479be09fc2cf005f753d3acc35939')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-pyqt/package.py b/var/spack/repos/builtin/packages/py-pyqt/package.py
index b090e25682..e1e87880e0 100644
--- a/var/spack/repos/builtin/packages/py-pyqt/package.py
+++ b/var/spack/repos/builtin/packages/py-pyqt/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class PyPyqt(Package):
"""PyQt is a set of Python v2 and v3 bindings for Digia's Qt
application framework and runs on all platforms supported by Qt
@@ -34,11 +35,11 @@ class PyPyqt(Package):
version('4.11.3', '997c3e443165a89a559e0d96b061bf70')
extends('python')
- depends_on('py-sip')
+ depends_on('py-sip', type=('build', 'run'))
# TODO: allow qt5 when conditional deps are supported.
# TODO: Fix version matching so that @4 works like @:4
- depends_on('qt@:4')
+ depends_on('qt@:4+phonon+dbus')
def install(self, spec, prefix):
python('configure.py',
diff --git a/var/spack/repos/builtin/packages/py-pyside/package.py b/var/spack/repos/builtin/packages/py-pyside/package.py
index affee6c55a..961aef7864 100644
--- a/var/spack/repos/builtin/packages/py-pyside/package.py
+++ b/var/spack/repos/builtin/packages/py-pyside/package.py
@@ -25,26 +25,30 @@
from spack import *
import os
-class PyPyside(Package):
+
+class PyPyside(PythonPackage):
"""Python bindings for Qt."""
homepage = "https://pypi.python.org/pypi/pyside"
url = "https://pypi.python.org/packages/source/P/PySide/PySide-1.2.2.tar.gz"
- version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d')
+ version('1.2.4', '3cb7174c13bd45e3e8f77638926cb8c0') # rpath problems
+ version('1.2.2', 'c45bc400c8a86d6b35f34c29e379e44d', preferred=True)
- # TODO: make build dependency
- # depends_on("cmake")
+ depends_on('cmake', type='build')
- extends('python')
- depends_on('py-setuptools')
- depends_on('qt@:4')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-sphinx', type=('build', 'run'))
+ depends_on('qt@4.5:4.9')
+ depends_on('libxml2@2.6.32:')
+ depends_on('libxslt@1.1.19:')
def patch(self):
"""Undo PySide RPATH handling and add Spack RPATH."""
# Figure out the special RPATH
pypkg = self.spec['python'].package
rpath = self.rpath
- rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'PySide'))
+ rpath.append(os.path.join(
+ self.prefix, pypkg.site_packages_dir, 'PySide'))
# Add Spack's standard CMake args to the sub-builds.
# They're called BY setup.py so we have to patch it.
@@ -57,13 +61,23 @@ class PyPyside(Package):
# PySide tries to patch ELF files to remove RPATHs
# Disable this and go with the one we set.
- filter_file(
- r'^\s*rpath_cmd\(pyside_path, srcpath\)',
- r'#rpath_cmd(pyside_path, srcpath)',
- 'pyside_postinstall.py')
+ if self.spec.satisfies('@1.2.4:'):
+ rpath_file = 'setup.py'
+ else:
+ rpath_file = 'pyside_postinstall.py'
+
+ filter_file(r'(^\s*)(rpath_cmd\(.*\))', r'\1#\2', rpath_file)
+
+ # TODO: rpath handling for PySide 1.2.4 still doesn't work.
+ # PySide can't find the Shiboken library, even though it comes
+ # bundled with it and is installed in the same directory.
+ # PySide does not provide official support for
+ # Python 3.5, but it should work fine
+ filter_file("'Programming Language :: Python :: 3.4'",
+ "'Programming Language :: Python :: 3.4',\r\n "
+ "'Programming Language :: Python :: 3.5'",
+ "setup.py")
- def install(self, spec, prefix):
- python('setup.py', 'install',
- '--prefix=%s' % prefix,
- '--jobs=%s' % make_jobs)
+ def build_args(self, spec, prefix):
+ return ['--jobs={0}'.format(make_jobs)]
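
The patch() method above relies on filter_file() to comment out the rpath_cmd(...) calls. A hedged, standalone sketch of the same idea follows; this is not Spack's own filter_file, whose real signature differs, but the regular expression is the one used in the diff:

    import re

    def filter_file(pattern, repl, path):
        """In-place regex substitution over one file (simplified sketch)."""
        with open(path) as fh:
            text = fh.read()
        with open(path, "w") as fh:
            fh.write(re.sub(pattern, repl, text, flags=re.MULTILINE))

    # Mirrors the call in patch(); the target file is whichever one holds
    # the rpath_cmd() call for the PySide version being built, e.g.:
    # filter_file(r'(^\s*)(rpath_cmd\(.*\))', r'\1#\2', 'pyside_postinstall.py')
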
diff --git a/var/spack/repos/builtin/packages/py-pytables/package.py b/var/spack/repos/builtin/packages/py-pytables/package.py
index 47592fb3d1..3d9bfb2c2f 100644
--- a/var/spack/repos/builtin/packages/py-pytables/package.py
+++ b/var/spack/repos/builtin/packages/py-pytables/package.py
@@ -23,21 +23,24 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import re
-class PyPytables(Package):
- """PyTables is a package for managing hierarchical datasets and designed to efficiently and easily cope with extremely large amounts of data."""
+
+class PyPytables(PythonPackage):
+ """PyTables is a package for managing hierarchical datasets and designed to
+ efficiently and easily cope with extremely large amounts of data."""
homepage = "http://www.pytables.org/"
url = "https://github.com/PyTables/PyTables/archive/v.3.2.2.tar.gz"
+ version('3.3.0', '056c161ae0fd2d6e585b766adacf3b0b',
+ url='https://github.com/PyTables/PyTables/archive/v3.3.0.tar.gz')
version('3.2.2', '7cbb0972e4d6580f629996a5bed92441')
- extends('python')
- depends_on('hdf5')
- depends_on('py-numpy')
- depends_on('py-numexpr')
- depends_on('py-cython')
+ depends_on('hdf5@1.8.0:1.8.999')
+ depends_on('py-numpy@1.8.0:', type=('build', 'run'))
+ depends_on('py-numexpr@2.5.2:', type=('build', 'run'))
+ depends_on('py-cython', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
- def install(self, spec, prefix):
- env["HDF5_DIR"] = spec['hdf5'].prefix
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ def setup_environment(self, spack_env, run_env):
+ spack_env.set('HDF5_DIR', self.spec['hdf5'].prefix)
diff --git a/var/spack/repos/builtin/packages/py-pytest/package.py b/var/spack/repos/builtin/packages/py-pytest/package.py
new file mode 100644
index 0000000000..4af49306c2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-pytest/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyPytest(PythonPackage):
+ """pytest: simple powerful testing with Python."""
+
+ homepage = "http://doc.pytest.org/en/latest/"
+ url = "https://pypi.python.org/packages/source/p/pytest/pytest-3.0.2.tar.gz"
+
+ version('3.0.2', '61dc36e65a6f6c11c53b1388e043a9f5',
+ url="https://pypi.python.org/packages/2b/05/e20806c99afaff43331f5fd8770bb346145303882f98ef3275fa1dd66f6d/pytest-3.0.2.tar.gz")
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-py@1.4.29:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-python-daemon/package.py b/var/spack/repos/builtin/packages/py-python-daemon/package.py
index ae6393986d..9ed085f031 100644
--- a/var/spack/repos/builtin/packages/py-python-daemon/package.py
+++ b/var/spack/repos/builtin/packages/py-python-daemon/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyPythonDaemon(Package):
+
+class PyPythonDaemon(PythonPackage):
"""Library to implement a well-behaved Unix daemon process.
This library implements the well-behaved daemon specification of
@@ -41,10 +42,5 @@ class PyPythonDaemon(Package):
version('2.0.5', '73e7f49f525c51fa4a995aea4d80de41')
- extends("python")
- depends_on("py-setuptools")
- depends_on("py-lockfile")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
+ depends_on("py-setuptools", type='build')
+ depends_on("py-lockfile", type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-pytz/package.py b/var/spack/repos/builtin/packages/py-pytz/package.py
index caf2c90e73..7a905f9f98 100644
--- a/var/spack/repos/builtin/packages/py-pytz/package.py
+++ b/var/spack/repos/builtin/packages/py-pytz/package.py
@@ -24,16 +24,17 @@
##############################################################################
from spack import *
-class PyPytz(Package):
+
+class PyPytz(PythonPackage):
"""World timezone definitions, modern and historical."""
+
homepage = "https://pypi.python.org/pypi/pytz"
url = "https://pypi.python.org/packages/source/p/pytz/pytz-2014.10.tar.gz"
+ version('2016.6.1', 'b6c28a3b968bc1d8badfb61b93874e03',
+ url="https://pypi.python.org/packages/5d/8e/6635d8f3f9f48c03bb925fab543383089858271f9cfd1216b83247e8df94/pytz-2016.6.1.tar.gz")
version('2014.10', 'eb1cb941a20c5b751352c52486aa1dd7')
version('2015.4', '417a47b1c432d90333e42084a605d3d8')
version('2016.3', 'abae92c3301b27bd8a9f56b14f52cb29')
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-pyyaml/package.py b/var/spack/repos/builtin/packages/py-pyyaml/package.py
index c0e22ba681..94d8fdd0e6 100644
--- a/var/spack/repos/builtin/packages/py-pyyaml/package.py
+++ b/var/spack/repos/builtin/packages/py-pyyaml/package.py
@@ -24,14 +24,10 @@
##############################################################################
from spack import *
-class PyPyyaml(Package):
+
+class PyPyyaml(PythonPackage):
"""PyYAML is a YAML parser and emitter for Python."""
homepage = "http://pyyaml.org/wiki/PyYAML"
url = "http://pyyaml.org/download/pyyaml/PyYAML-3.11.tar.gz"
version('3.11', 'f50e08ef0fe55178479d3a618efe21db')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-readme-renderer/package.py b/var/spack/repos/builtin/packages/py-readme-renderer/package.py
new file mode 100644
index 0000000000..2abe4bac74
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-readme-renderer/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyReadmeRenderer(PythonPackage):
+ """readme_renderer is a library for rendering "readme" descriptions
+ for Warehouse."""
+
+ homepage = "https://github.com/pypa/readme_renderer"
+ url = "https://pypi.python.org/packages/f2/6e/ef1bc3a24eb14e14574aba9dc1bd50bc9a5e7cc880e8ff9cadd385b4fb37/readme_renderer-16.0.tar.gz"
+
+ version('16.0', '70321cea986956bcf2deef9981569f39')
+
+ depends_on('python@2.6:2.7,3.2:3.3')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-bleach', type=('build', 'run'))
+ depends_on('py-docutils@0.13.1:', type=('build', 'run'))
+ depends_on('py-pygments', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-restview/package.py b/var/spack/repos/builtin/packages/py-restview/package.py
new file mode 100644
index 0000000000..871016fdb1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-restview/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyRestview(PythonPackage):
+ """A viewer for ReStructuredText documents that renders them on the fly."""
+
+ homepage = "https://mg.pov.lt/restview/"
+ url = "https://pypi.python.org/packages/source/r/restview/restview-2.6.1.tar.gz"
+
+ version('2.6.1', 'ac8b70e15b8f1732d1733d674813666b')
+
+ depends_on('python@2.7.0:2.7.999,3.3:3.5')
+ depends_on('py-docutils@0.13.1:', type=('build', 'run'))
+ depends_on('py-readme-renderer', type=('build', 'run'))
+ depends_on('py-pygments', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-rpy2/package.py b/var/spack/repos/builtin/packages/py-rpy2/package.py
index 03fd2b4a06..284a41894a 100644
--- a/var/spack/repos/builtin/packages/py-rpy2/package.py
+++ b/var/spack/repos/builtin/packages/py-rpy2/package.py
@@ -24,18 +24,25 @@
##############################################################################
from spack import *
-class PyRpy2(Package):
- """rpy2 is a redesign and rewrite of rpy. It is providing a low-level interface to R from Python, a proposed high-level interface, including wrappers to graphical libraries, as well as R-like structures and functions."""
+
+class PyRpy2(PythonPackage):
+ """rpy2 is a redesign and rewrite of rpy. It is providing a low-level
+ interface to R from Python, a proposed high-level interface,
+ including wrappers to graphical libraries, as well as R-like
+ structures and functions.
+
+ """
homepage = "https://pypi.python.org/pypi/rpy2"
- url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz"
+ url = "https://pypi.python.org/packages/source/r/rpy2/rpy2-2.5.4.tar.gz"
version('2.5.4', '115a20ac30883f096da2bdfcab55196d')
version('2.5.6', 'a36e758b633ce6aec6a5f450bfee980f')
- extends('python')
- depends_on('py-setuptools')
+ # FIXME: Missing dependencies:
+ # ld: cannot find -licuuc
+    # ld: cannot find -licui18n
- depends_on('R')
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('r')
diff --git a/var/spack/repos/builtin/packages/py-rtree/package.py b/var/spack/repos/builtin/packages/py-rtree/package.py
new file mode 100644
index 0000000000..55f98ad19e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-rtree/package.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyRtree(PythonPackage):
+ """Python interface to the RTREE.4 Library."""
+ homepage = "http://toblerity.org/rtree/"
+ url = "https://github.com/Toblerity/rtree/tarball/0.8.2"
+
+    # Not an official release yet, but the changes in this fork are
+    # required for py-rtree to work with Spack. As it does with all
+    # packages, Spack installs libspatialindex in a non-system
+    # location. Without the changes in this fork, py-rtree requires
+    # environment variables to be set *at runtime* in order to find
+    # libspatialindex. That is not feasible within the Spack worldview.
+ version('0.8.2.2', 'b1fe96a73153db49ea6ce45a063d82cb',
+ url='https://github.com/citibeth/rtree/tarball/95a678cc7350857a1bb631bc41254efcd1fc0a0d')
+
+ version('0.8.2.1', '394696ca849dd9f3a5ef24fb02a41ef4',
+ url='https://github.com/citibeth/rtree/tarball/3a87d86f66a3955676b2507d3bf424ade938a22b')
+
+ # Does not work with Spack
+ # version('0.8.2', '593c7ac6babc397b8ba58f1636c1e0a0')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('libspatialindex')
+
+ def setup_environment(self, spack_env, run_env):
+ lib = self.spec['libspatialindex'].prefix.lib
+ spack_env.set('SPATIALINDEX_LIBRARY',
+ join_path(lib, 'libspatialindex.%s' % dso_suffix))
+ spack_env.set('SPATIALINDEX_C_LIBRARY',
+ join_path(lib, 'libspatialindex_c.%s' % dso_suffix))
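
The comment in this package explains that, without the forked sources, libspatialindex must be located through environment variables at runtime. A hypothetical illustration of that manual workaround (the variable names are taken from the diff; the paths are placeholders, and whether stock Rtree honors both variables at runtime is an assumption here):

    import os

    # Point Rtree at Spack's libspatialindex before importing it
    # (placeholder paths -- substitute the real install prefix).
    os.environ["SPATIALINDEX_LIBRARY"] = "/path/to/lib/libspatialindex.so"
    os.environ["SPATIALINDEX_C_LIBRARY"] = "/path/to/lib/libspatialindex_c.so"

    import rtree  # noqa: E402  must come after the variables are set
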
diff --git a/var/spack/repos/builtin/packages/py-scientificpython/package.py b/var/spack/repos/builtin/packages/py-scientificpython/package.py
index 7f7eb76b73..f0fe2e4e6c 100644
--- a/var/spack/repos/builtin/packages/py-scientificpython/package.py
+++ b/var/spack/repos/builtin/packages/py-scientificpython/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyScientificpython(Package):
+
+class PyScientificpython(PythonPackage):
"""ScientificPython is a collection of Python modules for
scientific computing. It contains support for geometry,
mathematical functions, statistics, physical units, IO,
@@ -34,7 +35,4 @@ class PyScientificpython(Package):
url = "https://sourcesup.renater.fr/frs/download.php/file/4411/ScientificPython-2.8.1.tar.gz"
version('2.8.1', '73ee0df19c7b58cdf2954261f0763c77')
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-numpy')
diff --git a/var/spack/repos/builtin/packages/py-scikit-image/package.py b/var/spack/repos/builtin/packages/py-scikit-image/package.py
index 0479e9eb0a..d05341f9eb 100644
--- a/var/spack/repos/builtin/packages/py-scikit-image/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-image/package.py
@@ -24,8 +24,11 @@
##############################################################################
from spack import *
-class PyScikitImage(Package):
- """Image processing algorithms for SciPy, including IO, morphology, filtering, warping, color manipulation, object detection, etc."""
+
+class PyScikitImage(PythonPackage):
+ """Image processing algorithms for SciPy, including IO, morphology,
+ filtering, warping, color manipulation, object detection, etc."""
+
homepage = "http://scikit-image.org/"
url = "https://pypi.python.org/packages/source/s/scikit-image/scikit-image-0.12.3.tar.gz"
@@ -33,12 +36,10 @@ class PyScikitImage(Package):
extends('python', ignore=r'bin/.*\.py$')
- depends_on('py-dask')
- depends_on('py-pillow')
- depends_on('py-networkx')
- depends_on('py-six')
- depends_on('py-scipy')
- depends_on('py-matplotlib')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-dask', type=('build', 'run'))
+ depends_on('pil', type=('build', 'run'))
+ depends_on('py-networkx', type=('build', 'run'))
+ depends_on('py-six', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-scikit-learn/package.py b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
index 7bb5853b19..7c7102ce41 100644
--- a/var/spack/repos/builtin/packages/py-scikit-learn/package.py
+++ b/var/spack/repos/builtin/packages/py-scikit-learn/package.py
@@ -24,8 +24,9 @@
##############################################################################
from spack import *
-class PyScikitLearn(Package):
- """"""
+
+class PyScikitLearn(PythonPackage):
+ """A set of python modules for machine learning and data mining."""
homepage = "https://pypi.python.org/pypi/scikit-learn"
url = "https://pypi.python.org/packages/source/s/scikit-learn/scikit-learn-0.15.2.tar.gz"
@@ -33,11 +34,6 @@ class PyScikitLearn(Package):
version('0.16.1', '363ddda501e3b6b61726aa40b8dbdb7e')
version('0.17.1', 'a2f8b877e6d99b1ed737144f5a478dfc')
- extends('python')
-
- depends_on('py-setuptools')
- depends_on('py-numpy')
- depends_on('py-scipy')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-scipy/package.py b/var/spack/repos/builtin/packages/py-scipy/package.py
index f7c372b345..85b6f631e1 100644
--- a/var/spack/repos/builtin/packages/py-scipy/package.py
+++ b/var/spack/repos/builtin/packages/py-scipy/package.py
@@ -24,26 +24,28 @@
##############################################################################
from spack import *
-class PyScipy(Package):
- """Scientific Library for Python."""
+
+class PyScipy(PythonPackage):
+ """SciPy (pronounced "Sigh Pie") is a Scientific Library for Python.
+ It provides many user-friendly and efficient numerical routines such
+ as routines for numerical integration and optimization."""
+
homepage = "http://www.scipy.org/"
- url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz"
+ url = "https://pypi.python.org/packages/source/s/scipy/scipy-0.15.0.tar.gz"
+ version('0.18.1', '5fb5fb7ccb113ab3a039702b6c2f3327',
+ url="https://pypi.python.org/packages/22/41/b1538a75309ae4913cdbbdc8d1cc54cae6d37981d2759532c1aa37a41121/scipy-0.18.1.tar.gz")
version('0.17.0', '5ff2971e1ce90e762c59d2cd84837224')
version('0.15.1', 'be56cd8e60591d6332aac792a5880110')
version('0.15.0', '639112f077f0aeb6d80718dc5019dc7a')
- extends('python')
- depends_on('py-nose')
- depends_on('py-numpy+blas+lapack')
-
- def install(self, spec, prefix):
- if 'atlas' in spec:
- # libatlas.so actually isn't always installed, but this
- # seems to make the build autodetect things correctly.
- env['ATLAS'] = join_path(spec['atlas'].prefix.lib, 'libatlas.' + dso_suffix)
- else:
- env['BLAS'] = spec['blas'].blas_shared_lib
- env['LAPACK'] = spec['lapack'].lapack_shared_lib
+ depends_on('python@2.6:2.8,3.2:')
+ depends_on('py-nose', type='build')
+ # Known not to work with 2.23, 2.25
+ depends_on('binutils@2.26:', type='build')
+ depends_on('py-numpy@1.7.1:+blas+lapack', type=('build', 'run'))
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ # NOTE: scipy picks up Blas/Lapack from numpy, see
+ # http://www.scipy.org/scipylib/building/linux.html#step-4-build-numpy-1-5-0
+ depends_on('blas')
+ depends_on('lapack')
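
Given the note above that SciPy picks up BLAS/LAPACK through NumPy, a quick post-install check is to compare what the two libraries report; both expose a show_config() helper, so their blas/lapack entries should point at the same implementation (assuming the build worked as intended):

    import numpy
    import scipy

    # Both should report the same BLAS/LAPACK (e.g. OpenBLAS or ATLAS).
    numpy.show_config()
    scipy.show_config()
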
diff --git a/var/spack/repos/builtin/packages/py-setuptools/package.py b/var/spack/repos/builtin/packages/py-setuptools/package.py
index 68032cb68d..d355800958 100644
--- a/var/spack/repos/builtin/packages/py-setuptools/package.py
+++ b/var/spack/repos/builtin/packages/py-setuptools/package.py
@@ -24,11 +24,16 @@
##############################################################################
from spack import *
-class PySetuptools(Package):
- """Easily download, build, install, upgrade, and uninstall Python packages."""
+
+class PySetuptools(PythonPackage):
+ """A Python utility that aids in the process of downloading, building,
+ upgrading, installing, and uninstalling Python packages."""
+
homepage = "https://pypi.python.org/pypi/setuptools"
url = "https://pypi.python.org/packages/source/s/setuptools/setuptools-11.3.tar.gz"
+ version('25.2.0', 'a0dbb65889c46214c691f6c516cf959c',
+ url="https://pypi.python.org/packages/9f/32/81c324675725d78e7f6da777483a3453611a427db0145dfb878940469692/setuptools-25.2.0.tar.gz")
version('20.7.0', '5d12b39bf3e75e80fdce54e44b255615')
version('20.6.7', '45d6110f3ec14924e44c33411db64fe6')
version('20.5', 'fadc1e1123ddbe31006e5e43e927362b')
@@ -36,8 +41,3 @@ class PySetuptools(Package):
version('18.1', 'f72e87f34fbf07f299f6cb46256a0b06')
version('16.0', '0ace0b96233516fc5f7c857d086aa3ad')
version('11.3.1', '01f69212e019a2420c1693fb43593930')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-shiboken/package.py b/var/spack/repos/builtin/packages/py-shiboken/package.py
index a2240c68a4..3ad51d5fc5 100644
--- a/var/spack/repos/builtin/packages/py-shiboken/package.py
+++ b/var/spack/repos/builtin/packages/py-shiboken/package.py
@@ -25,18 +25,18 @@
from spack import *
import os
-class PyShiboken(Package):
- """Shiboken generates bindings for C++ libraries using CPython source code."""
+
+class PyShiboken(PythonPackage):
+ """Shiboken generates bindings for C++ libraries using CPython."""
homepage = "https://shiboken.readthedocs.org/"
url = "https://pypi.python.org/packages/source/S/Shiboken/Shiboken-1.2.2.tar.gz"
version('1.2.2', '345cfebda221f525842e079a6141e555')
- # TODO: make build dependency
- # depends_on("cmake")
+ depends_on('cmake', type='build')
- extends('python')
- depends_on("py-setuptools")
+ depends_on("py-setuptools", type='build')
+ depends_on("py-sphinx", type=('build', 'run'))
depends_on("libxml2")
depends_on("qt@:4.8")
@@ -46,7 +46,8 @@ class PyShiboken(Package):
# They're called BY setup.py so we have to patch it.
pypkg = self.spec['python'].package
rpath = self.rpath
- rpath.append(os.path.join(self.prefix, pypkg.site_packages_dir, 'Shiboken'))
+ rpath.append(os.path.join(
+ self.prefix, pypkg.site_packages_dir, 'Shiboken'))
filter_file(
r'OPTION_CMAKE,',
@@ -62,8 +63,5 @@ class PyShiboken(Package):
r'#rpath_cmd(shiboken_path, srcpath)',
'shiboken_postinstall.py')
-
- def install(self, spec, prefix):
- python('setup.py', 'install',
- '--prefix=%s' % prefix,
- '--jobs=%s' % make_jobs)
+ def build_args(self, spec, prefix):
+ return ['--jobs={0}'.format(make_jobs)]
diff --git a/var/spack/repos/builtin/packages/py-simplegeneric/package.py b/var/spack/repos/builtin/packages/py-simplegeneric/package.py
new file mode 100644
index 0000000000..3881f8bc88
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-simplegeneric/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySimplegeneric(PythonPackage):
+ """Simple generic functions (similar to Python's own len(),
+ pickle.dump(), etc.)"""
+
+ homepage = "https://pypi.python.org/pypi/simplegeneric"
+ url = "https://pypi.io/packages/source/s/simplegeneric/simplegeneric-0.8.zip"
+
+ version('0.8.1', 'f9c1fab00fd981be588fc32759f474e3')
+ version('0.8', 'eaa358a5f9517a8b475d03fbee3ec90f')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-singledispatch/package.py b/var/spack/repos/builtin/packages/py-singledispatch/package.py
new file mode 100644
index 0000000000..9125ecb5f8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-singledispatch/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySingledispatch(PythonPackage):
+ """This library brings functools.singledispatch to Python 2.6-3.3."""
+
+ homepage = "https://pypi.python.org/pypi/singledispatch"
+ url = "https://pypi.io/packages/source/s/singledispatch/singledispatch-3.4.0.3.tar.gz"
+
+ version('3.4.0.3', 'af2fc6a3d6cc5a02d0bf54d909785fcb')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-six')
+
+ # This dependency breaks concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-ordereddict', when="^python@:2.6.999", type=('build', 'run')) # noqa
diff --git a/var/spack/repos/builtin/packages/py-sip/package.py b/var/spack/repos/builtin/packages/py-sip/package.py
index fc8e7f5296..9d97f08433 100644
--- a/var/spack/repos/builtin/packages/py-sip/package.py
+++ b/var/spack/repos/builtin/packages/py-sip/package.py
@@ -25,8 +25,10 @@
from spack import *
import os
+
class PySip(Package):
- """SIP is a tool that makes it very easy to create Python bindings for C and C++ libraries."""
+ """SIP is a tool that makes it very easy to create Python bindings for C
+ and C++ libraries."""
homepage = "http://www.riverbankcomputing.com/software/sip/intro"
url = "http://sourceforge.net/projects/pyqt/files/sip/sip-4.16.5/sip-4.16.5.tar.gz"
diff --git a/var/spack/repos/builtin/packages/py-six/package.py b/var/spack/repos/builtin/packages/py-six/package.py
index 3efb3d4317..7d653fd102 100644
--- a/var/spack/repos/builtin/packages/py-six/package.py
+++ b/var/spack/repos/builtin/packages/py-six/package.py
@@ -24,16 +24,16 @@
##############################################################################
from spack import *
-class PySix(Package):
+
+class PySix(PythonPackage):
"""Python 2 and 3 compatibility utilities."""
+
homepage = "https://pypi.python.org/pypi/six"
url = "https://pypi.python.org/packages/source/s/six/six-1.9.0.tar.gz"
- version('1.9.0', '476881ef4012262dfc8adc645ee786c4')
version('1.10.0', '34eed507548117b2ab523ab14b2f8b55')
+ version('1.9.0', '476881ef4012262dfc8adc645ee786c4')
- extends('python')
- depends_on('py-setuptools')
+ extends('python', ignore=r'bin/pytest')
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-sncosmo/package.py b/var/spack/repos/builtin/packages/py-sncosmo/package.py
new file mode 100644
index 0000000000..f9d2546da3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sncosmo/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySncosmo(PythonPackage):
+ """SNCosmo is a Python library for high-level supernova cosmology
+ analysis."""
+
+ homepage = "http://sncosmo.readthedocs.io/"
+ url = "https://pypi.python.org/packages/source/s/sncosmo/sncosmo-1.2.0.tar.gz"
+
+ version('1.2.0', '028e6d1dc84ab1c17d2f3b6378b2cb1e')
+
+ # Required dependencies
+ # py-sncosmo binaries are duplicates of those from py-astropy
+ extends('python', ignore=r'bin/.*')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-scipy', type=('build', 'run'))
+ depends_on('py-astropy', type=('build', 'run'))
+
+ # Recommended dependencies
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-iminuit', type=('build', 'run'))
+ depends_on('py-emcee', type=('build', 'run'))
+ depends_on('py-nestle', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-snowballstemmer/package.py b/var/spack/repos/builtin/packages/py-snowballstemmer/package.py
new file mode 100644
index 0000000000..cfeeeb26ce
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-snowballstemmer/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySnowballstemmer(PythonPackage):
+ """This package provides 16 stemmer algorithms (15 + Poerter
+ English stemmer) generated from Snowball algorithms."""
+
+ homepage = "https://pypi.python.org/pypi/snowballstemmer"
+ url = "https://pypi.python.org/packages/source/s/snowballstemmer/snowballstemmer-1.2.1.tar.gz"
+
+ version('1.2.1', '643b019667a708a922172e33a99bf2fa')
diff --git a/var/spack/repos/builtin/packages/py-sphinx-bootstrap-theme/package.py b/var/spack/repos/builtin/packages/py-sphinx-bootstrap-theme/package.py
new file mode 100644
index 0000000000..38f127dfc4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sphinx-bootstrap-theme/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySphinxBootstrapTheme(Package):
+ """Sphinx Bootstrap Theme."""
+
+ homepage = "https://pypi.python.org/pypi/sphinx-bootstrap-theme/"
+ url = "https://pypi.io/packages/source/s/sphinx-bootstrap-theme/sphinx-bootstrap-theme-0.4.13.tar.gz"
+
+ version('0.4.13', '32e513a9c8ffbb8c1e4b036e8f74fb51')
+
+ extends('python')
+
+ depends_on('py-setuptools', type='build')
+
+ def install(self, spec, prefix):
+ setup_py('install', '--prefix={0}'.format(prefix))
diff --git a/var/spack/repos/builtin/packages/py-sphinx-rtd-theme/package.py b/var/spack/repos/builtin/packages/py-sphinx-rtd-theme/package.py
new file mode 100644
index 0000000000..4b9141d80c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-sphinx-rtd-theme/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySphinxRtdTheme(PythonPackage):
+ """ReadTheDocs.org theme for Sphinx."""
+
+ homepage = "https://pypi.python.org/pypi/sphinx_rtd_theme"
+ url = "https://pypi.python.org/packages/source/s/sphinx_rtd_theme/sphinx_rtd_theme-0.1.10a0.tar.gz"
+
+ version('0.1.10a0', '83bd95cae55aa8b773a8cc3a41094282',
+ url="https://pypi.python.org/packages/da/6b/1b75f13d8aa3333f19c6cdf1f0bc9f52ea739cae464fbee050307c121857/sphinx_rtd_theme-0.1.10a0.tar.gz")
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-sphinx/package.py b/var/spack/repos/builtin/packages/py-sphinx/package.py
index d00f1d128b..b71f2ed8c5 100644
--- a/var/spack/repos/builtin/packages/py-sphinx/package.py
+++ b/var/spack/repos/builtin/packages/py-sphinx/package.py
@@ -24,14 +24,28 @@
##############################################################################
from spack import *
-class PySphinx(Package):
+
+class PySphinx(PythonPackage):
"""Sphinx Documentation Generator."""
homepage = "http://sphinx-doc.org"
url = "https://pypi.python.org/packages/source/S/Sphinx/Sphinx-1.3.1.tar.gz"
+ version('1.4.5', '5c2cd2dac45dfa6123d067e32a89e89a',
+ url='https://pypi.python.org/packages/8b/78/eeea2b837f911cdc301f5f05163f9729a2381cadd03ccf35b25afe816c90/Sphinx-1.4.5.tar.gz')
version('1.3.1', '8786a194acf9673464c5455b11fd4332')
- extends('python')
+ extends('python', ignore='bin/(pybabel|pygmentize)')
+
+ # Most Python packages only require py-setuptools as a build dependency.
+ # However, py-sphinx requires py-setuptools during runtime as well.
+ depends_on('py-setuptools', type=('build', 'run'))
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-six@1.4:', type=('build', 'run'))
+ depends_on('py-jinja2@2.3:', type=('build', 'run'))
+ depends_on('py-pygments@2.0:', type=('build', 'run'))
+ depends_on('py-docutils@0.11:', type=('build', 'run'))
+ depends_on('py-snowballstemmer@1.1:', type=('build', 'run'))
+ depends_on('py-babel@1.3:', type=('build', 'run')) # not 2.0
+ depends_on('py-alabaster@0.7:', type=('build', 'run'))
+ depends_on('py-imagesize', when='@1.4:', type=('build', 'run'))
+ depends_on('py-sphinx-rtd-theme@0.1:', type=('build', 'run')) # optional as of 1.4
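
The comment above notes that py-sphinx needs py-setuptools at run time, not only at build time. One hedged explanation, assumed here rather than stated in the diff, is that the setuptools-generated console scripts (sphinx-build, sphinx-quickstart) import pkg_resources when they start, so setuptools must be importable whenever Sphinx runs:

    # Quick runtime check: pkg_resources ships with setuptools, so this
    # import only succeeds when setuptools is available at run time.
    import pkg_resources

    print(pkg_resources.get_distribution("setuptools").version)
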
diff --git a/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py
index 05f4616ff5..f8221058a0 100644
--- a/var/spack/repos/builtin/packages/py-SQLAlchemy/package.py
+++ b/var/spack/repos/builtin/packages/py-sqlalchemy/package.py
@@ -24,15 +24,11 @@
##############################################################################
from spack import *
-class PySqlalchemy(Package):
+
+class PySqlalchemy(PythonPackage):
"""The Python SQL Toolkit and Object Relational Mapper"""
homepage = 'http://www.sqlalchemy.org/'
url = "https://pypi.python.org/packages/source/S/SQLAlchemy/SQLAlchemy-1.0.12.tar.gz"
version('1.0.12', '6d19ef29883bbebdcac6613cf391cac4')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-storm/package.py b/var/spack/repos/builtin/packages/py-storm/package.py
index 53af720c50..a6c2900414 100644
--- a/var/spack/repos/builtin/packages/py-storm/package.py
+++ b/var/spack/repos/builtin/packages/py-storm/package.py
@@ -22,19 +22,14 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-from spack import depends_on, extends, version
-from spack import Package
+from spack import *
-class PyStorm(Package):
+class PyStorm(PythonPackage):
"""Storm is an object-relational mapper (ORM) for Python"""
homepage = "https://storm.canonical.com/"
url = "https://launchpad.net/storm/trunk/0.20/+download/storm-0.20.tar.gz"
version('0.20', '8628503141f0f06c0749d607ac09b9c7')
- extends('python')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-symengine/package.py b/var/spack/repos/builtin/packages/py-symengine/package.py
new file mode 100644
index 0000000000..0817d394f2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-symengine/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PySymengine(PythonPackage):
+ """Python wrappers for SymEngine, a symbolic manipulation library."""
+
+ homepage = "https://github.com/symengine/symengine.py"
+ url = "https://github.com/symengine/symengine.py/archive/v0.2.0.tar.gz"
+
+ version('0.2.0', 'e1d114fa12be4c8c7e9f24007e07718c')
+ version('develop', git='https://github.com/symengine/symengine.py.git')
+
+ # Build dependencies
+ depends_on('python@2.7:2.8,3.3:')
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython@0.19.1:')
+ depends_on('cmake@2.8.7:', type='build')
+ depends_on('symengine@0.2.0:')
+
+ def build_args(self, spec, prefix):
+ return ['--symengine-dir={0}'.format(spec['symengine'].prefix)]
diff --git a/var/spack/repos/builtin/packages/py-sympy/package.py b/var/spack/repos/builtin/packages/py-sympy/package.py
index 5e38fc5be1..58c4167e23 100644
--- a/var/spack/repos/builtin/packages/py-sympy/package.py
+++ b/var/spack/repos/builtin/packages/py-sympy/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PySympy(Package):
+
+class PySympy(PythonPackage):
"""SymPy is a Python library for symbolic mathematics."""
homepage = "https://pypi.python.org/pypi/sympy"
url = "https://pypi.python.org/packages/source/s/sympy/sympy-0.7.6.tar.gz"
@@ -32,8 +33,4 @@ class PySympy(Package):
version('0.7.6', '3d04753974306d8a13830008e17babca')
version('1.0', '43e797de799f00f9e8fd2307dba9fab1')
- extends('python')
depends_on('py-mpmath', when='@1.0:')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-tappy/package.py b/var/spack/repos/builtin/packages/py-tappy/package.py
index a1026a9b4b..22bc15392f 100644
--- a/var/spack/repos/builtin/packages/py-tappy/package.py
+++ b/var/spack/repos/builtin/packages/py-tappy/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyTappy(Package):
+
+class PyTappy(PythonPackage):
"""Python TAP interface module for unit tests"""
homepage = "https://github.com/mblayman/tappy"
# base https://pypi.python.org/pypi/cffi
@@ -32,8 +33,9 @@ class PyTappy(Package):
version('1.6', 'c8bdb93ad66e05f939905172a301bedf')
- extends('python')
- depends_on('py-setuptools')
+ extends('python', ignore='bin/nosetests|bin/pygmentize')
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('python@2.6:2.7,3.2:3.4')
+ depends_on('py-nose', type=('build', 'run'))
+ depends_on('py-pygments', type=('build', 'run'))
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-terminado/package.py b/var/spack/repos/builtin/packages/py-terminado/package.py
new file mode 100644
index 0000000000..4cebe14fca
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-terminado/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyTerminado(PythonPackage):
+ """Terminals served to term.js using Tornado websockets"""
+
+ homepage = "https://pypi.python.org/pypi/terminado"
+ url = "https://pypi.io/packages/source/t/terminado/terminado-0.6.tar.gz"
+
+ version('0.6', '5b6c65da27fe1ed07a9f80f0588cdaba')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-tornado@4:', type=('build', 'run'))
+ depends_on('py-ptyprocess', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-tornado/package.py b/var/spack/repos/builtin/packages/py-tornado/package.py
new file mode 100644
index 0000000000..eb9c660947
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-tornado/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyTornado(PythonPackage):
+ """Tornado is a Python web framework and asynchronous networking
+ library."""
+ homepage = "https://github.com/tornadoweb/tornado"
+ url = "https://github.com/tornadoweb/tornado/archive/v4.4.0.tar.gz"
+
+ version('4.4.0', 'c28675e944f364ee96dda3a8d2527a87ed28cfa3')
+
+ depends_on('py-setuptools', type='build')
+
+ # requirements from setup.py
+    # These dependencies break concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-backports-ssl-match-hostname', when='^python@:2.7.8', type=('build', 'run')) # noqa
+ # depends_on('py-singledispatch', when='^python@:3.3', type=('build', 'run')) # noqa
+ # depends_on('py-certifi', when='^python@:3.3', type=('build', 'run'))
+ # depends_on('py-backports-abc@0.4:', when='^python@:3.4', type=('build', 'run')) # noqa
+ depends_on('py-backports-ssl-match-hostname', type=('build', 'run'))
+ depends_on('py-singledispatch', type=('build', 'run'))
+ depends_on('py-certifi', type=('build', 'run'))
+ depends_on('py-backports-abc@0.4:', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-traitlets/package.py b/var/spack/repos/builtin/packages/py-traitlets/package.py
new file mode 100644
index 0000000000..debd1dca43
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-traitlets/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyTraitlets(PythonPackage):
+ """Traitlets Python config system"""
+
+ homepage = "https://pypi.python.org/pypi/traitlets"
+ url = "https://github.com/ipython/traitlets/archive/4.3.1.tar.gz"
+
+ version('4.3.1', '146a4885ea64079f62a33b2049841543')
+ version('4.3.0', '17af8d1306a401c42dbc92a080722422')
+ version('4.2.2', 'ffc03056dc5c8d1fc5dbd6eac76e1e46')
+ version('4.2.1', 'fc7f46a76b99ebc5068f99033d268dcf')
+ version('4.2.0', '53553a10d124e264fd2e234d0571b7d0')
+ version('4.1.0', 'd5bc75c7bd529afb40afce86c2facc3a')
+ version('4.0.0', 'b5b95ea5941fd9619b4704dfd8201568')
+ version('4.0', '14544e25ccf8e920ed1cbf833852481f')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-decorator', type=('build', 'run'))
+ depends_on('py-ipython-genutils', type=('build', 'run'))
+
+ # This dependency breaks concretization
+ # See https://github.com/LLNL/spack/issues/2793
+ # depends_on('py-enum34', when='^python@:3.3', type=('build', 'run'))
+ depends_on('py-enum34', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/py-tuiview/package.py b/var/spack/repos/builtin/packages/py-tuiview/package.py
index 200a33a676..93726cf004 100644
--- a/var/spack/repos/builtin/packages/py-tuiview/package.py
+++ b/var/spack/repos/builtin/packages/py-tuiview/package.py
@@ -24,20 +24,17 @@
##############################################################################
from spack import *
-class PyTuiview(Package):
- """
- TuiView is a lightweight raster GIS with powerful raster attribute
- table manipulation abilities.
+
+class PyTuiview(PythonPackage):
+ """TuiView is a lightweight raster GIS with powerful raster attribute
+ table manipulation abilities.
"""
+
homepage = "https://bitbucket.org/chchrsc/tuiview"
url = "https://bitbucket.org/chchrsc/tuiview/get/tuiview-1.1.7.tar.gz"
version('1.1.7', '4b3b38a820cc239c8ab4a181ac5d4c30')
- extends("python")
- depends_on("py-pyqt")
- depends_on("py-numpy")
+ depends_on("py-pyqt", type=('build', 'run'))
+ depends_on("py-numpy", type=('build', 'run'))
depends_on("gdal")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-twisted/package.py b/var/spack/repos/builtin/packages/py-twisted/package.py
index 27db4adff4..e558adbc7f 100644
--- a/var/spack/repos/builtin/packages/py-twisted/package.py
+++ b/var/spack/repos/builtin/packages/py-twisted/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyTwisted(Package):
+
+class PyTwisted(PythonPackage):
"""An asynchronous networking framework written in Python"""
homepage = "https://twistedmatrix.com/"
url = "https://pypi.python.org/packages/source/T/Twisted/Twisted-15.3.0.tar.bz2"
@@ -32,9 +33,4 @@ class PyTwisted(Package):
version('15.4.0', '5337ffb6aeeff3790981a2cd56db9655')
version('15.3.0', 'b58e83da2f00b3352afad74d0c5c4599')
- depends_on('py-setuptools')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-unittest2/package.py b/var/spack/repos/builtin/packages/py-unittest2/package.py
new file mode 100644
index 0000000000..d6a68c3535
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-unittest2/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyUnittest2(PythonPackage):
+ """unittest2 is a backport of the new features added to the unittest
+ testing framework in Python 2.7 and onwards."""
+
+ homepage = "https://pypi.python.org/pypi/unittest2"
+ url = "https://pypi.python.org/packages/source/u/unittest2/unittest2-1.1.0.tar.gz"
+
+ version('1.1.0', 'f72dae5d44f091df36b6b513305ea000')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-unittest2py3k/package.py b/var/spack/repos/builtin/packages/py-unittest2py3k/package.py
new file mode 100644
index 0000000000..03134acfcd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-unittest2py3k/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyUnittest2py3k(PythonPackage):
+ """unittest2 is a backport of the new features added to the unittest
+ testing framework in Python 2.7 and 3.2. This is a Python 3 compatible
+ version of unittest2."""
+
+ homepage = "https://pypi.python.org/pypi/unittest2py3k"
+ url = "https://pypi.python.org/packages/source/u/unittest2py3k/unittest2py3k-0.5.1.tar.gz"
+
+ version('0.5.1', '8824ff92044310d9365f90d892bf0f09')
+
+ depends_on('python@3:')
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-urwid/package.py b/var/spack/repos/builtin/packages/py-urwid/package.py
index 0accc72df8..8e33d2bef2 100644
--- a/var/spack/repos/builtin/packages/py-urwid/package.py
+++ b/var/spack/repos/builtin/packages/py-urwid/package.py
@@ -24,17 +24,12 @@
##############################################################################
from spack import *
-class PyUrwid(Package):
+
+class PyUrwid(PythonPackage):
"""A full-featured console UI library"""
homepage = "http://urwid.org/"
url = "https://pypi.python.org/packages/source/u/urwid/urwid-1.3.0.tar.gz"
version('1.3.0', 'a989acd54f4ff1a554add464803a9175')
- depends_on('py-setuptools')
-
- extends("python")
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
-
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-vcversioner/package.py b/var/spack/repos/builtin/packages/py-vcversioner/package.py
new file mode 100644
index 0000000000..81e4f7bdda
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-vcversioner/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyVcversioner(PythonPackage):
+ """Vcversioner: Take version numbers from version control."""
+
+ homepage = "https://github.com/habnabit/vcversioner"
+ url = "https://pypi.python.org/packages/source/v/vcversioner/vcversioner-2.16.0.0.tar.gz"
+
+ version('2.16.0.0', 'aab6ef5e0cf8614a1b1140ed5b7f107d')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-virtualenv/package.py b/var/spack/repos/builtin/packages/py-virtualenv/package.py
index d42cb96eb7..5e6431b637 100644
--- a/var/spack/repos/builtin/packages/py-virtualenv/package.py
+++ b/var/spack/repos/builtin/packages/py-virtualenv/package.py
@@ -23,9 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import shutil
-class PyVirtualenv(Package):
+
+class PyVirtualenv(PythonPackage):
"""virtualenv is a tool to create isolated Python environments."""
homepage = "http://virtualenv.readthedocs.org/projects/virtualenv/"
url = "https://pypi.python.org/packages/source/v/virtualenv/virtualenv-1.11.6.tar.gz"
@@ -34,8 +34,4 @@ class PyVirtualenv(Package):
version('13.0.1', '1ffc011bde6667f0e37ecd976f4934db')
version('15.0.1', '28d76a0d9cbd5dc42046dd14e76a6ecc')
- extends('python')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-wcsaxes/package.py b/var/spack/repos/builtin/packages/py-wcsaxes/package.py
new file mode 100644
index 0000000000..be1d151ee9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-wcsaxes/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyWcsaxes(PythonPackage):
+ """WCSAxes is a framework for making plots of Astronomical data
+ in Matplotlib."""
+
+ homepage = "http://wcsaxes.readthedocs.io/en/latest/index.html"
+ url = "https://github.com/astrofrog/wcsaxes/archive/v0.8.tar.gz"
+
+ version('0.8', 'de1c60fdae4c330bf5ddb9f1ab5ab920')
+
+ extends('python', ignore=r'bin/')
+ depends_on('py-numpy', type=('build', 'run'))
+ depends_on('py-matplotlib', type=('build', 'run'))
+ depends_on('py-astropy', type=('build', 'run'))
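
The ignore=r'bin/' argument to extends above keeps files matching that pattern out of the Python prefix when the extension is activated; a hedged sketch of the same idiom (package name, URL, and checksum are placeholders):

    from spack import *


    class PyExamplePlot(PythonPackage):
        """Hypothetical extension whose bin/ scripts should not be activated."""

        homepage = "https://example.org/py-example-plot"
        url      = "https://example.org/py-example-plot-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        # Link this extension into the python prefix on activation, but skip
        # anything under bin/ so the extension's scripts are not linked into
        # the interpreter's bin directory.
        extends('python', ignore=r'bin/')
        depends_on('py-numpy', type=('build', 'run'))
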
diff --git a/var/spack/repos/builtin/packages/py-wcwidth/package.py b/var/spack/repos/builtin/packages/py-wcwidth/package.py
new file mode 100644
index 0000000000..c4846e2ee7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-wcwidth/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyWcwidth(PythonPackage):
+ """Measures number of Terminal column cells of wide-character codes"""
+
+ homepage = "https://pypi.python.org/pypi/wcwidth"
+ url = "https://pypi.io/packages/source/w/wcwidth/wcwidth-0.1.7.tar.gz"
+
+ version('0.1.7', 'b3b6a0a08f0c8a34d1de8cf44150a4ad')
+
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-wheel/package.py b/var/spack/repos/builtin/packages/py-wheel/package.py
index 68b6f3d679..7e678df2e8 100644
--- a/var/spack/repos/builtin/packages/py-wheel/package.py
+++ b/var/spack/repos/builtin/packages/py-wheel/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyWheel(Package):
+
+class PyWheel(PythonPackage):
"""A built-package format for Python."""
homepage = "https://pypi.python.org/pypi/wheel"
@@ -32,8 +33,4 @@ class PyWheel(Package):
version('0.26.0', '4cfc6e7e3dc7377d0164914623922a10')
- extends('python')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py b/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py
new file mode 100644
index 0000000000..916263bda6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-widgetsnbextension/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyWidgetsnbextension(PythonPackage):
+ """IPython HTML widgets for Jupyter"""
+
+ homepage = "https://pypi.python.org/pypi/widgetsnbextension"
+ url = "https://pypi.io/packages/source/w/widgetsnbextension/widgetsnbextension-1.2.6.tar.gz"
+
+ version('1.2.6', '0aa4e152c9ba2d704389dc2453f448c7')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('python@2.7:2.7.999,3.3:')
+ depends_on('py-jupyter-notebook@4.2.0:', type=('build', 'run'))
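
The python@2.7:2.7.999,3.3: constraint above is a comma-separated list of version ranges, read as "any Python 2.7.x, or 3.3 and newer"; a hedged sketch of the range syntax (package names, URL, and checksum are placeholders):

    from spack import *


    class PyExampleWidget(PythonPackage):
        """Hypothetical package showing Spack version-range constraints."""

        homepage = "https://example.org/py-example-widget"
        url      = "https://example.org/py-example-widget-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        depends_on('py-setuptools', type='build')
        # '2.7:2.7.999' matches any Python 2.7.x release; '3.3:' matches 3.3
        # or newer; the comma means either range satisfies the constraint.
        depends_on('python@2.7:2.7.999,3.3:')
        # An open-ended lower bound works the same way for ordinary packages.
        depends_on('py-example-notebook@4.2.0:', type=('build', 'run'))
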
diff --git a/var/spack/repos/builtin/packages/py-xlrd/package.py b/var/spack/repos/builtin/packages/py-xlrd/package.py
index 9220f90340..bbd2f57b07 100644
--- a/var/spack/repos/builtin/packages/py-xlrd/package.py
+++ b/var/spack/repos/builtin/packages/py-xlrd/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyXlrd(Package):
+
+class PyXlrd(PythonPackage):
"""Library for developers to extract data from Microsoft Excel (tm)
spreadsheet files"""
@@ -32,8 +33,3 @@ class PyXlrd(Package):
url = "https://pypi.python.org/packages/source/x/xlrd/xlrd-0.9.4.tar.gz"
version('0.9.4', '911839f534d29fe04525ef8cd88fe865')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/py-xpyb/package.py b/var/spack/repos/builtin/packages/py-xpyb/package.py
new file mode 100644
index 0000000000..49c6343c45
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-xpyb/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyXpyb(Package):
+ """xpyb provides a Python binding to the X Window System protocol
+ via libxcb."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xpyb-1.3.1.tar.gz"
+
+ version('1.3.1', '75d567e25517fb883a56f10b77fd2757')
+
+ extends('python')
+
+ depends_on('libxcb@1.5:')
+
+ depends_on('xcb-proto@1.7.1:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/py-yapf/package.py b/var/spack/repos/builtin/packages/py-yapf/package.py
index 60f740c98d..5f5d32e3d0 100644
--- a/var/spack/repos/builtin/packages/py-yapf/package.py
+++ b/var/spack/repos/builtin/packages/py-yapf/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class PyYapf(Package):
+
+class PyYapf(PythonPackage):
""" Yet Another Python Formatter """
homepage = "https://github.com/google/yapf"
# base https://pypi.python.org/pypi/cffi
@@ -32,8 +33,4 @@ class PyYapf(Package):
version('0.2.1', '348ccf86cf2057872e4451b204fb914c')
- extends('python')
- depends_on('py-setuptools')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
+ depends_on('py-setuptools', type='build')
diff --git a/var/spack/repos/builtin/packages/py-yt/package.py b/var/spack/repos/builtin/packages/py-yt/package.py
new file mode 100644
index 0000000000..6ab967d8a5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-yt/package.py
@@ -0,0 +1,73 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class PyYt(PythonPackage):
+ """Volumetric Data Analysis
+
+ yt is a python package for analyzing and visualizing
+ volumetric, multi-resolution data from astrophysical
+ simulations, radio telescopes, and a burgeoning
+ interdisciplinary community.
+ """
+ homepage = "http://yt-project.org"
+ url = "https://bitbucket.org/yt_analysis/yt"
+
+ version("3.3.1", hg="https://bitbucket.org/yt_analysis/yt",
+ tag="yt-3.3.1", commit="9bc3d0e9b750c923d44d73c447df64fc431f5838")
+ version("3.2.3", hg="https://bitbucket.org/yt_analysis/yt",
+ tag="yt-3.2.3", commit="83d2c1e9313e7d83eb5b96888451ff2646fd8ff3")
+ version("3.1.0", hg="https://bitbucket.org/yt_analysis/yt",
+ tag="yt-3.1.0", commit="fd7cdc4836188a3badf81adb477bcc1b9632e485")
+ version("3.0.2", hg="https://bitbucket.org/yt_analysis/yt",
+ tag="yt-3.0.2", commit="511887af4c995a78fe606e58ce8162c88380ecdc")
+ version("2.6.3", hg="https://bitbucket.org/yt_analysis/yt",
+ tag="yt-2.6.3", commit="816186f16396a16853810ac9ebcde5057d8d5b1a")
+ version("development", hg="https://bitbucket.org/yt_analysis/yt",
+ branch="yt")
+
+ variant("astropy", default=True, description="enable astropy support")
+ variant("h5py", default=True, description="enable h5py support")
+ variant("scipy", default=True, description="enable scipy support")
+
+ depends_on("py-astropy", type=('build', 'run'), when="+astropy")
+ depends_on("py-cython", type=('build', 'run'))
+ depends_on("py-h5py", type=('build', 'run'), when="+h5py")
+ depends_on("py-ipython", type=('build', 'run'))
+ depends_on("py-matplotlib", type=('build', 'run'))
+ depends_on("py-numpy", type=('build', 'run'))
+ depends_on("py-scipy", type=('build', 'run'), when="+scipy")
+ depends_on("py-setuptools", type="build")
+ depends_on("py-sympy", type=('build', 'run'))
+ depends_on("python @2.7:2.999,3.4:")
+
+ @PythonPackage.sanity_check('install')
+ def check_install(self):
+ # The Python interpreter path can be too long for this
+ # yt = Executable(join_path(prefix.bin, "yt"))
+ # yt("--help")
+ python(join_path(self.prefix.bin, "yt"), "--help")
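
The PythonPackage.sanity_check('install') hook above runs right after installation, and the comment explains why the installed yt script is launched through python rather than directly: its shebang line, which embeds the full interpreter path, can exceed the kernel's length limit. A hedged sketch of the same pattern (package name, URL, checksum, and the example-tool entry point are placeholders):

    from spack import *


    class PyExampleTool(PythonPackage):
        """Hypothetical package with a post-install sanity check."""

        homepage = "https://example.org/py-example-tool"
        url      = "https://example.org/py-example-tool-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        depends_on('py-setuptools', type='build')

        @PythonPackage.sanity_check('install')
        def check_install(self):
            # Invoke the installed script through the python that built it,
            # side-stepping an over-long shebang in the script itself.
            python(join_path(self.prefix.bin, 'example-tool'), '--help')
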
diff --git a/var/spack/repos/builtin/packages/py-zmq/package.py b/var/spack/repos/builtin/packages/py-zmq/package.py
new file mode 100644
index 0000000000..cbc0e02e6e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/py-zmq/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class PyZmq(PythonPackage):
+ """PyZMQ: Python bindings for zeromq."""
+ homepage = "https://github.com/zeromq/pyzmq"
+ url = "https://github.com/zeromq/pyzmq/archive/v14.7.0.tar.gz"
+
+ version('16.0.2', '4cf14a2995742253b2b009541f4436f4')
+ version('14.7.0', 'bf304fb73d72aee314ff82d3554328c179938ecf')
+
+ depends_on('py-setuptools', type='build')
+ depends_on('py-cython@0.16:', type=('build', 'run'))
+ depends_on('py-py', type=('build', 'run'))
+ depends_on('py-cffi', type=('build', 'run'))
+ depends_on('zeromq')
diff --git a/var/spack/repos/builtin/packages/python/ncurses.patch b/var/spack/repos/builtin/packages/python/ncurses.patch
new file mode 100644
index 0000000000..9054c03e7b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/python/ncurses.patch
@@ -0,0 +1,11 @@
+--- a/setup.py 2016-08-30 15:39:59.334926574 -0500
++++ b/setup.py 2016-08-30 15:46:57.227946339 -0500
+@@ -745,8 +745,6 @@
+ # use the same library for the readline and curses modules.
+ if 'curses' in readline_termcap_library:
+ curses_library = readline_termcap_library
+- elif self.compiler.find_library_file(lib_dirs, 'ncursesw'):
+- curses_library = 'ncursesw'
+ elif self.compiler.find_library_file(lib_dirs, 'ncurses'):
+ curses_library = 'ncurses'
+ elif self.compiler.find_library_file(lib_dirs, 'curses'):
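
This patch file sits next to the python package.py and is applied by the patch('ncurses.patch') directive added in the package below; it removes the ncursesw branch so setup.py links the curses module against the plain ncurses that Spack builds. A minimal hedged sketch of a package-local patch (package name, URL, and checksum are placeholders):

    from spack import *


    class ExampleInterpreter(Package):
        """Hypothetical package that ships a patch in its repository directory."""

        homepage = "https://example.org/example-interpreter"
        url      = "https://example.org/example-interpreter-1.0.tar.gz"

        version('1.0', '00000000000000000000000000000000')

        depends_on('ncurses')

        # The file is looked up relative to this package.py and applied to
        # the expanded source tree before any build phase runs.
        patch('ncurses.patch')

        def install(self, spec, prefix):
            configure('--prefix={0}'.format(prefix))
            make()
            make('install')
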
diff --git a/var/spack/repos/builtin/packages/python/package.py b/var/spack/repos/builtin/packages/python/package.py
index 86c12498e1..348b075e2f 100644
--- a/var/spack/repos/builtin/packages/python/package.py
+++ b/var/spack/repos/builtin/packages/python/package.py
@@ -22,81 +22,293 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import functools
-import glob
-import inspect
+import ast
import os
import re
from contextlib import closing
import spack
+import llnl.util.tty as tty
from llnl.util.lang import match_predicate
+from llnl.util.filesystem import force_remove
from spack import *
from spack.util.environment import *
+import spack.util.spack_json as sjson
class Python(Package):
"""The Python programming language."""
- homepage = "http://www.python.org"
- url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tgz"
- extendable = True
+ homepage = "http://www.python.org"
+ url = "http://www.python.org/ftp/python/2.7.8/Python-2.7.8.tgz"
+ list_url = "https://www.python.org/downloads/"
+ list_depth = 2
+ version('3.6.0', '3f7062ccf8be76491884d0e47ac8b251')
+ version('3.5.2', '3fe8434643a78630c61c6464fe2e7e72')
version('3.5.1', 'be78e48cdfc1a7ad90efff146dce6cfe')
version('3.5.0', 'a56c0c0b45d75a0ec9c6dee933c41c36')
- version('2.7.11', '6b6076ec9e93f05dd63e47eb9c15728b', preferred=True)
+ version('3.4.3', '4281ff86778db65892c05151d5de738d')
+ version('3.3.6', 'cdb3cd08f96f074b3f3994ccb51063e9')
+ version('3.2.6', '23815d82ae706e9b781ca65865353d39')
+ version('3.1.5', '02196d3fc7bc76bdda68aa36b0dd16ab')
+ version('2.7.13', '17add4bf0ad0ec2f08e0cae6d205c700', preferred=True)
+ version('2.7.12', '88d61f82e3616a4be952828b3694109d')
+ version('2.7.11', '6b6076ec9e93f05dd63e47eb9c15728b')
version('2.7.10', 'd7547558fd673bd9d38e2108c6b42521')
version('2.7.9', '5eebcaa0030dc4061156d3429657fb83')
version('2.7.8', 'd4bca0159acb0b44a781292b5231936f')
+ extendable = True
+
+ variant('tk', default=False, description='Provide support for Tkinter')
+ variant('ucs4', default=False,
+ description='Enable UCS4 (wide) unicode strings')
+ # From https://docs.python.org/2/c-api/unicode.html: Python's default
+ # builds use a 16-bit type for Py_UNICODE and store Unicode values
+ # internally as UCS2. It is also possible to build a UCS4 version of Python
+ # (most recent Linux distributions come with UCS4 builds of Python). These
+ # builds then use a 32-bit type for Py_UNICODE and store Unicode data
+ # internally as UCS4. Note that UCS2 and UCS4 Python builds are not binary
+ # compatible.
+
depends_on("openssl")
depends_on("bzip2")
depends_on("readline")
depends_on("ncurses")
depends_on("sqlite")
depends_on("zlib")
+ depends_on("tk", when="+tk")
+ depends_on("tcl", when="+tk")
+
+ patch('ncurses.patch')
+
+ _DISTUTIL_VARS_TO_SAVE = ['LDSHARED']
+ _DISTUTIL_CACHE_FILENAME = 'sysconfig.json'
+ _distutil_vars = None
+
+ @when('@2.7,3.4:')
+ def patch(self):
+ # NOTE: Python's default installation procedure makes it possible for a
+ # user's local configurations to change the Spack installation. In
+ # order to prevent this behavior for a full installation, we must
+ # modify the installation script so that it ignores user files.
+ ff = FileFilter('Makefile.pre.in')
+ ff.filter(
+ r'^(.*)setup\.py(.*)((build)|(install))(.*)$',
+ r'\1setup.py\2 --no-user-cfg \3\6'
+ )
+
+ @when('@:2.6,3.0:3.3')
+ def patch(self):
+ # See https://github.com/LLNL/spack/issues/1490
+ pass
def install(self, spec, prefix):
+ # TODO: The '--no-user-cfg' option for Python installation is only in
+ # Python v2.7 and v3.4+ (see https://bugs.python.org/issue1180) and
+ # adding support for ignoring user configuration will require
+ # significant changes to this package for other Python versions.
+ if not spec.satisfies('@2.7,3.4:'):
+ tty.warn(('Python v{0} may not install properly if Python '
+ 'user configurations are present.').format(self.version))
+
# Need this to allow python build to find the Python installation.
- env['PYTHONHOME'] = prefix
+ env['PYTHONHOME'], env['PYTHONPATH'] = prefix, prefix
env['MACOSX_DEPLOYMENT_TARGET'] = '10.6'
# Rest of install is pretty standard except setup.py needs to
# be able to read the CPPFLAGS and LDFLAGS as it scans for the
# library and headers to build
- configure_args= [
- "--prefix=%s" % prefix,
- "--with-threads",
- "--enable-shared",
- "CPPFLAGS=-I%s/include -I%s/include -I%s/include -I%s/include -I%s/include -I%s/include" % (
- spec['openssl'].prefix, spec['bzip2'].prefix,
- spec['readline'].prefix, spec['ncurses'].prefix,
- spec['sqlite'].prefix, spec['zlib'].prefix),
- "LDFLAGS=-L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib -L%s/lib" % (
- spec['openssl'].prefix, spec['bzip2'].prefix,
- spec['readline'].prefix, spec['ncurses'].prefix,
- spec['sqlite'].prefix, spec['zlib'].prefix)
- ]
- if spec.satisfies('@3:'):
- configure_args.append('--without-ensurepip')
- configure(*configure_args)
- make()
- make("install")
-
- # Modify compiler paths in configuration files. This is necessary for
- # building site packages outside of spack
- filter_file(r'([/s]=?)([\S=]*)/lib/spack/env(/[^\s/]*)?/(\S*)(\s)',
- (r'\4\5'),
- join_path(prefix.lib, 'python%d.%d' % self.version[:2], '_sysconfigdata.py'))
+ dep_pfxs = [dspec.prefix for dspec in spec.dependencies('link')]
+ config_args = [
+ '--prefix={0}'.format(prefix),
+ '--with-threads',
+ '--enable-shared',
+ 'CPPFLAGS=-I{0}'.format(' -I'.join(dp.include for dp in dep_pfxs)),
+ 'LDFLAGS=-L{0}'.format(' -L'.join(dp.lib for dp in dep_pfxs)),
+ ]
+ if spec.satisfies("platform=darwin") and ('%gcc' in spec):
+ config_args.append('--disable-toolbox-glue')
+
+ if '+ucs4' in spec:
+ if spec.satisfies('@:2.7'):
+ config_args.append('--enable-unicode=ucs4')
+ elif spec.satisfies('@3.0:3.2'):
+ config_args.append('--with-wide-unicode')
+ elif spec.satisfies('@3.3:'):
+ # https://docs.python.org/3.3/whatsnew/3.3.html
+ raise ValueError(
+ '+ucs4 variant not compatible with Python 3.3 and beyond')
- python3_version = ''
if spec.satisfies('@3:'):
- python3_version = '-%d.%dm' % self.version[:2]
- makefile_filepath = join_path(prefix.lib, 'python%d.%d' % self.version[:2], 'config%s' % python3_version, 'Makefile')
- filter_file(r'([/s]=?)([\S=]*)/lib/spack/env(/[^\s/]*)?/(\S*)(\s)',
- (r'\4\5'),
- makefile_filepath)
+ config_args.append('--without-ensurepip')
+ configure(*config_args)
+ make()
+ make('install')
+
+ self._save_distutil_vars(prefix)
+
+ self.filter_compilers(prefix)
+
+ # TODO:
+ # On OpenSuse 13, python uses <prefix>/lib64/python2.7/lib-dynload/*.so
+ # instead of <prefix>/lib/python2.7/lib-dynload/*.so. Oddly enough the
+ # result is that Python can not find modules like cPickle. A workaround
+ # for now is to symlink to `lib`:
+ src = os.path.join(prefix.lib64,
+ 'python{0}'.format(self.version.up_to(2)),
+ 'lib-dynload')
+ dst = os.path.join(prefix.lib,
+ 'python{0}'.format(self.version.up_to(2)),
+ 'lib-dynload')
+ if os.path.isdir(src) and not os.path.isdir(dst):
+ mkdirp(dst)
+ for f in os.listdir(src):
+ os.symlink(os.path.join(src, f),
+ os.path.join(dst, f))
+
+ # TODO: Once better testing support is integrated, add the following tests
+ # https://wiki.python.org/moin/TkInter
+ #
+ # Note: Only works if ForwardX11Trusted is enabled, i.e. `ssh -Y`
+ #
+ # if '+tk' in spec:
+ # env['TK_LIBRARY'] = join_path(spec['tk'].prefix.lib,
+ # 'tk{0}'.format(spec['tk'].version.up_to(2)))
+ # env['TCL_LIBRARY'] = join_path(spec['tcl'].prefix.lib,
+ # 'tcl{0}'.format(spec['tcl'].version.up_to(2)))
+ #
+ # $ python
+ # >>> import _tkinter
+ #
+ # if spec.satisfies('@3:')
+ # >>> import tkinter
+ # >>> tkinter._test()
+ # else:
+ # >>> import Tkinter
+ # >>> Tkinter._test()
+
+ def _save_distutil_vars(self, prefix):
+ """
+ Run before changing automatically generated contents of the
+ _sysconfigdata.py, which is used by distutils to figure out what
+ executables to use while compiling and linking extensions. If we build
+ extensions with spack those executables should be spack's wrappers.
+ Spack partially covers this by setting environment variables that
+ are also accounted for by distutils. Currently there is one more known
+ variable that must be set, which is LDSHARED, so the method saves its
+ autogenerated value to pass it to the dependent package's setup script.
+ """
+
+ self._distutil_vars = {}
+
+ input_filename = None
+ for filename in [join_path(lib_dir,
+ 'python{0}'.format(self.version.up_to(2)),
+ '_sysconfigdata.py')
+ for lib_dir in [prefix.lib, prefix.lib64]]:
+ if os.path.isfile(filename):
+ input_filename = filename
+ break
+
+ if not input_filename:
+ return
+
+ input_dict = None
+ try:
+ with open(input_filename) as input_file:
+ match = re.search(r'build_time_vars\s*=\s*(?P<dict>{.*})',
+ input_file.read(),
+ flags=re.DOTALL)
+
+ if match:
+ input_dict = ast.literal_eval(match.group('dict'))
+ except (IOError, SyntaxError):
+ pass
+
+ if not input_dict:
+ tty.warn('Failed to find \'build_time_vars\' dictionary in file '
+ '\'%s\'. This might cause the extensions that are '
+ 'installed with distutils to call compilers directly '
+ 'avoiding Spack\'s wrappers.' % input_filename)
+ return
+
+ for var_name in Python._DISTUTIL_VARS_TO_SAVE:
+ if var_name in input_dict:
+ self._distutil_vars[var_name] = input_dict[var_name]
+ else:
+ tty.warn('Failed to find key \'%s\' in \'build_time_vars\' '
+ 'dictionary in file \'%s\'. This might cause the '
+ 'extensions that are installed with distutils to '
+ 'call compilers directly avoiding Spack\'s wrappers.'
+ % (var_name, input_filename))
+
+ if len(self._distutil_vars) > 0:
+ output_filename = None
+ try:
+ output_filename = join_path(
+ spack.store.layout.metadata_path(self.spec),
+ Python._DISTUTIL_CACHE_FILENAME)
+ with open(output_filename, 'w') as output_file:
+ sjson.dump(self._distutil_vars, output_file)
+ except:
+ tty.warn('Failed to save metadata for distutils. This might '
+ 'cause the extensions that are installed with '
+ 'distutils to call compilers directly avoiding '
+ 'Spack\'s wrappers.')
+ # We make the cache empty if we failed to save it to file
+ # to provide the same behaviour as in the case when the cache
+ # is initialized by the method load_distutils_data().
+ self._distutil_vars = {}
+ if output_filename:
+ force_remove(output_filename)
+
+ def _load_distutil_vars(self):
+ # Load the cached values from disk only once, and only if the
+ # package is already installed.
+ if not self._distutil_vars and self.installed:
+ try:
+ input_filename = join_path(
+ spack.store.layout.metadata_path(self.spec),
+ Python._DISTUTIL_CACHE_FILENAME)
+ if os.path.isfile(input_filename):
+ with open(input_filename) as input_file:
+ self._distutil_vars = sjson.load(input_file)
+ except:
+ pass
+
+ if not self._distutil_vars:
+ self._distutil_vars = {}
+
+ return self._distutil_vars
+
+ def filter_compilers(self, prefix):
+ """Run after install to tell the configuration files and Makefiles
+ to use the compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC and CXX set to Spack's generic
+ cc and c++. We want them to be bound to whatever compiler
+ they were built with."""
+
+ kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
+
+ lib_dirnames = [
+ join_path(lib_dir, 'python{0}'.format(self.version.up_to(2))) for
+ lib_dir in [prefix.lib, prefix.lib64]]
+
+ config_dirname = 'config-{0}m'.format(
+ self.version.up_to(2)) if self.spec.satisfies('@3:') else 'config'
+
+ rel_filenames = ['_sysconfigdata.py',
+ join_path(config_dirname, 'Makefile')]
+
+ abs_filenames = [join_path(dirname, filename) for dirname in
+ lib_dirnames for filename in rel_filenames]
+
+ filter_file(env['CC'], self.compiler.cc, *abs_filenames, **kwargs)
+ filter_file(env['CXX'], self.compiler.cxx, *abs_filenames, **kwargs)
# ========================================================================
# Set up environment to make install easy for python extensions.
@@ -104,57 +316,86 @@ class Python(Package):
@property
def python_lib_dir(self):
- return os.path.join('lib', 'python%d.%d' % self.version[:2])
-
+ return join_path('lib', 'python{0}'.format(self.version.up_to(2)))
@property
def python_include_dir(self):
- return os.path.join('include', 'python%d.%d' % self.version[:2])
-
+ return join_path('include', 'python{0}'.format(self.version.up_to(2)))
@property
def site_packages_dir(self):
- return os.path.join(self.python_lib_dir, 'site-packages')
-
+ return join_path(self.python_lib_dir, 'site-packages')
def setup_dependent_environment(self, spack_env, run_env, extension_spec):
- # TODO: do this only for actual extensions.
+ """Set PYTHONPATH to include site-packages dir for the
+ extension and any other python extensions it depends on."""
+ # The python executable for version 3 may be python3 or python
+ # See https://github.com/LLNL/spack/pull/2173#issuecomment-257170199
+ pythonex = 'python{0}'.format('3' if self.spec.satisfies('@3') else '')
+ if os.path.isdir(self.prefix.bin):
+ base = self.prefix.bin
+ else:
+ base = self.prefix
+ if not os.path.isfile(os.path.join(base, pythonex)):
+ if self.spec.satisfies('@3'):
+ python = Executable(os.path.join(base, 'python'))
+ version = python('-c', 'import sys; print(sys.version)',
+ output=str)
+ if version.startswith('3'):
+ pythonex = 'python'
+ else:
+ raise RuntimeError('Cannot locate python executable')
+ else:
+ raise RuntimeError('Cannot locate python executable')
+ python = Executable(os.path.join(base, pythonex))
+ prefix = python('-c', 'import sys; print(sys.prefix)', output=str)
+ spack_env.set('PYTHONHOME', prefix.strip('\n'))
- # Set PYTHONPATH to include site-packages dir for the
- # extension and any other python extensions it depends on.
python_paths = []
- for d in extension_spec.traverse():
+ for d in extension_spec.traverse(
+ deptype=('build', 'run'), deptype_query='run'):
if d.package.extends(self.spec):
- python_paths.append(os.path.join(d.prefix, self.site_packages_dir))
+ python_paths.append(join_path(d.prefix,
+ self.site_packages_dir))
pythonpath = ':'.join(python_paths)
spack_env.set('PYTHONPATH', pythonpath)
- # For run time environment set only the path for extension_spec and prepend it to PYTHONPATH
+ # For run time environment set only the path for
+ # extension_spec and prepend it to PYTHONPATH
if extension_spec.package.extends(self.spec):
- run_env.prepend_path('PYTHONPATH', os.path.join(extension_spec.prefix, self.site_packages_dir))
-
+ run_env.prepend_path('PYTHONPATH', join_path(
+ extension_spec.prefix, self.site_packages_dir))
def setup_dependent_package(self, module, ext_spec):
- """
- Called before python modules' install() methods.
+ """Called before python modules' install() methods.
In most cases, extensions will only need to have one line::
- python('setup.py', 'install', '--prefix=%s' % prefix)
- """
- # Python extension builds can have a global python executable function
- if self.version >= Version("3.0.0") and self.version < Version("4.0.0"):
- module.python = Executable(join_path(self.spec.prefix.bin, 'python3'))
- else:
- module.python = Executable(join_path(self.spec.prefix.bin, 'python'))
+ setup_py('install', '--prefix={0}'.format(prefix))"""
+ python_path = join_path(
+ self.spec.prefix.bin,
+ 'python{0}'.format('3' if self.spec.satisfies('@3') else '')
+ )
- # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
- module.python_lib_dir = os.path.join(ext_spec.prefix, self.python_lib_dir)
- module.python_include_dir = os.path.join(ext_spec.prefix, self.python_include_dir)
- module.site_packages_dir = os.path.join(ext_spec.prefix, self.site_packages_dir)
+ module.python = Executable(python_path)
+ module.setup_py = Executable(python_path + ' setup.py --no-user-cfg')
+
+ distutil_vars = self._load_distutil_vars()
- # Make the site packages directory for extensions, if it does not exist already.
+ if distutil_vars:
+ for key, value in distutil_vars.iteritems():
+ module.setup_py.add_default_env(key, value)
+
+ # Add variables for lib/pythonX.Y and lib/pythonX.Y/site-packages dirs.
+ module.python_lib_dir = join_path(ext_spec.prefix,
+ self.python_lib_dir)
+ module.python_include_dir = join_path(ext_spec.prefix,
+ self.python_include_dir)
+ module.site_packages_dir = join_path(ext_spec.prefix,
+ self.site_packages_dir)
+
+ # Make the site packages directory for extensions
if ext_spec.package.is_extension:
mkdirp(module.site_packages_dir)
@@ -167,25 +408,30 @@ class Python(Package):
ignore_arg = args.get('ignore', lambda f: False)
# Always ignore easy-install.pth, as it needs to be merged.
- patterns = [r'easy-install\.pth$']
+ patterns = [r'site-packages/easy-install\.pth$']
# Ignore pieces of setuptools installed by other packages.
+ # Must include directory name or it will remove all site*.py files.
if ext_pkg.name != 'py-setuptools':
- patterns.append(r'/site[^/]*\.pyc?$')
- patterns.append(r'setuptools\.pth')
- patterns.append(r'bin/easy_install[^/]*$')
- patterns.append(r'setuptools.*egg$')
+ patterns.extend([
+ r'bin/easy_install[^/]*$',
+ r'site-packages/setuptools[^/]*\.egg$',
+ r'site-packages/setuptools\.pth$',
+ r'site-packages/site[^/]*\.pyc?$',
+ r'site-packages/__pycache__/site[^/]*\.pyc?$'
+ ])
+ if ext_pkg.name != 'py-pygments':
+ patterns.append(r'bin/pygmentize$')
if ext_pkg.name != 'py-numpy':
- patterns.append(r'bin/f2py$')
+ patterns.append(r'bin/f2py3?$')
return match_predicate(ignore_arg, patterns)
-
def write_easy_install_pth(self, exts):
paths = []
for ext in sorted(exts.values()):
- ext_site_packages = os.path.join(ext.prefix, self.site_packages_dir)
- easy_pth = "%s/easy-install.pth" % ext_site_packages
+ ext_site_packages = join_path(ext.prefix, self.site_packages_dir)
+ easy_pth = join_path(ext_site_packages, "easy-install.pth")
if not os.path.isfile(easy_pth):
continue
@@ -195,15 +441,18 @@ class Python(Package):
line = line.rstrip()
# Skip lines matching these criteria
- if not line: continue
- if re.search(r'^(import|#)', line): continue
+ if not line:
+ continue
+ if re.search(r'^(import|#)', line):
+ continue
if (ext.name != 'py-setuptools' and
- re.search(r'setuptools.*egg$', line)): continue
+ re.search(r'setuptools.*egg$', line)):
+ continue
paths.append(line)
- site_packages = os.path.join(self.prefix, self.site_packages_dir)
- main_pth = "%s/easy-install.pth" % site_packages
+ site_packages = join_path(self.prefix, self.site_packages_dir)
+ main_pth = join_path(site_packages, "easy-install.pth")
if not paths:
if os.path.isfile(main_pth):
@@ -211,29 +460,36 @@ class Python(Package):
else:
with closing(open(main_pth, 'w')) as f:
- f.write("import sys; sys.__plen = len(sys.path)\n")
+ f.write("""
+import sys
+sys.__plen = len(sys.path)
+""")
for path in paths:
- f.write("%s\n" % path)
- f.write("import sys; new=sys.path[sys.__plen:]; del sys.path[sys.__plen:]; "
- "p=getattr(sys,'__egginsert',0); sys.path[p:p]=new; sys.__egginsert = p+len(new)\n")
-
+ f.write("{0}\n".format(path))
+ f.write("""
+new = sys.path[sys.__plen:]
+del sys.path[sys.__plen:]
+p = getattr(sys, '__egginsert', 0)
+sys.path[p:p] = new
+sys.__egginsert = p + len(new)
+""")
def activate(self, ext_pkg, **args):
- ignore=self.python_ignore(ext_pkg, args)
+ ignore = self.python_ignore(ext_pkg, args)
args.update(ignore=ignore)
super(Python, self).activate(ext_pkg, **args)
- exts = spack.install_layout.extension_map(self.spec)
+ exts = spack.store.layout.extension_map(self.spec)
exts[ext_pkg.name] = ext_pkg.spec
self.write_easy_install_pth(exts)
-
def deactivate(self, ext_pkg, **args):
args.update(ignore=self.python_ignore(ext_pkg, args))
super(Python, self).deactivate(ext_pkg, **args)
- exts = spack.install_layout.extension_map(self.spec)
- if ext_pkg.name in exts: # Make deactivate idempotent.
+ exts = spack.store.layout.extension_map(self.spec)
+ # Make deactivate idempotent
+ if ext_pkg.name in exts:
del exts[ext_pkg.name]
self.write_easy_install_pth(exts)
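
The pair of version-conditional patch() methods added above rely on Spack's @when decorator: the Makefile.pre.in rewrite that forces --no-user-cfg applies only to Python 2.7 and 3.4+, while older releases get a no-op. A hedged sketch of that multimethod pattern (class name, URL, versions, and checksums are placeholders):

    from spack import *


    class ExampleLang(Package):
        """Hypothetical package with version-conditional patch() methods."""

        homepage = "https://example.org/example-lang"
        url      = "https://example.org/example-lang-2.7.tar.gz"

        version('2.7', '00000000000000000000000000000000')
        version('2.6', '11111111111111111111111111111111')

        @when('@2.7:')
        def patch(self):
            # Newer releases understand --no-user-cfg, so append it to every
            # setup.py invocation in the generated Makefile.
            ff = FileFilter('Makefile.pre.in')
            ff.filter(r'^(.*)setup\.py(.*)((build)|(install))(.*)$',
                      r'\1setup.py\2 --no-user-cfg \3\6')

        @when('@:2.6')
        def patch(self):
            # Older releases lack the option; leave the build files untouched.
            pass

        def install(self, spec, prefix):
            configure('--prefix={0}'.format(prefix))
            make()
            make('install')
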
diff --git a/var/spack/repos/builtin/packages/qhull/package.py b/var/spack/repos/builtin/packages/qhull/package.py
index 280c9cc12c..4456c16bd2 100644
--- a/var/spack/repos/builtin/packages/qhull/package.py
+++ b/var/spack/repos/builtin/packages/qhull/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class Qhull(Package):
+
+class Qhull(CMakePackage):
"""Qhull computes the convex hull, Delaunay triangulation, Voronoi
diagram, halfspace intersection about a point, furthest-site
Delaunay triangulation, and furthest-site Voronoi diagram. The
@@ -36,19 +37,10 @@ class Qhull(Package):
homepage = "http://www.qhull.org"
- version('7.2.0', 'e6270733a826a6a7c32b796e005ec3dc',
+ version('2015.2', 'e6270733a826a6a7c32b796e005ec3dc',
url="http://www.qhull.org/download/qhull-2015-src-7.2.0.tgz")
- version('1.0', 'd0f978c0d8dfb2e919caefa56ea2953c',
+ version('2012.1', 'd0f978c0d8dfb2e919caefa56ea2953c',
url="http://www.qhull.org/download/qhull-2012.1-src.tgz")
- # https://github.com/qhull/qhull/pull/5
- patch('qhull-iterator.patch', when='@1.0')
-
- depends_on('cmake')
-
- def install(self, spec, prefix):
- with working_dir('spack-build', create=True):
- cmake('..', *std_cmake_args)
- make()
- make("install")
+ depends_on('cmake@2.6:', type='build')
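
With the switch to CMakePackage above, the explicit cmake/make/'make install' sequence is provided by the base class, so the package is reduced to metadata plus a build-time dependency on cmake. A hedged sketch of the minimal shape such a package takes, with an optional cmake_args override (class name, URL, checksum, and the flag shown are placeholders):

    from spack import *


    class ExampleHull(CMakePackage):
        """Hypothetical geometry library built via the CMakePackage base class."""

        homepage = "https://example.org/example-hull"
        url      = "https://example.org/example-hull-2015.2.tar.gz"

        version('2015.2', '00000000000000000000000000000000')

        depends_on('cmake@2.6:', type='build')

        def cmake_args(self):
            # Optional hook: extra flags passed to the cmake invocation that
            # the base class runs in its own out-of-source build directory.
            return ['-DBUILD_SHARED_LIBS=ON']
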
diff --git a/var/spack/repos/builtin/packages/qhull/qhull-iterator.patch b/var/spack/repos/builtin/packages/qhull/qhull-iterator.patch
deleted file mode 100644
index 88e931d84f..0000000000
--- a/var/spack/repos/builtin/packages/qhull/qhull-iterator.patch
+++ /dev/null
@@ -1,45 +0,0 @@
-From 93f4b306c54bb5be7724dcc19c6e747b62ac76dd Mon Sep 17 00:00:00 2001
-From: Ben Boeckel <mathstuf@gmail.com>
-Date: Thu, 28 May 2015 11:12:25 -0400
-Subject: [PATCH] iterator: use the header
-
-Standard libraries are doing funky things with inline namespaces which
-make these declarations impossible to get right. Just include the
-header.
----
- src/libqhullcpp/QhullIterator.h | 3 +--
- src/libqhullcpp/QhullLinkedList.h | 5 +----
- 2 files changed, 2 insertions(+), 6 deletions(-)
-
-diff --git a/src/libqhullcpp/QhullIterator.h b/src/libqhullcpp/QhullIterator.h
-index 9dde894..49f3a3b 100644
---- a/src/libqhullcpp/QhullIterator.h
-+++ b/src/libqhullcpp/QhullIterator.h
-@@ -14,10 +14,9 @@ extern "C" {
- }
-
- #include <assert.h>
-+#include <iterator>
- #include <string>
- #include <vector>
--//! Avoid dependence on <iterator>
--namespace std { struct bidirectional_iterator_tag; struct random_access_iterator_tag; }
-
- namespace orgQhull {
-
-diff --git a/src/libqhullcpp/QhullLinkedList.h b/src/libqhullcpp/QhullLinkedList.h
-index d828ac6..00b9008 100644
---- a/src/libqhullcpp/QhullLinkedList.h
-+++ b/src/libqhullcpp/QhullLinkedList.h
-@@ -9,10 +9,7 @@
- #ifndef QHULLLINKEDLIST_H
- #define QHULLLINKEDLIST_H
-
--namespace std {
-- struct bidirectional_iterator_tag;
-- struct random_access_iterator_tag;
--}//std
-+#include <iterator>
-
- #include "QhullError.h"
- extern "C" {
diff --git a/var/spack/repos/builtin/packages/qrupdate/package.py b/var/spack/repos/builtin/packages/qrupdate/package.py
index 314950dc36..f6b4c80cf4 100644
--- a/var/spack/repos/builtin/packages/qrupdate/package.py
+++ b/var/spack/repos/builtin/packages/qrupdate/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Qrupdate(Package):
"""qrupdate is a Fortran library for fast updates of QR and
Cholesky decompositions."""
diff --git a/var/spack/repos/builtin/packages/qt-creator/package.py b/var/spack/repos/builtin/packages/qt-creator/package.py
new file mode 100644
index 0000000000..347cf4d6ee
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qt-creator/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class QtCreator(Package):
+ """The Qt Creator IDE."""
+ homepage = 'https://www.qt.io/ide/'
+ url = 'http://download.qt.io/official_releases/qtcreator/4.1/4.1.0/qt-creator-opensource-src-4.1.0.tar.gz'
+
+ list_url = 'http://download.qt.io/official_releases/qtcreator/'
+ list_depth = 3
+
+ version('4.1.0', '657727e4209befa4bf5889dff62d9e0a')
+
+ depends_on("qt")
+
+ def install(self, spec, prefix):
+ os.environ['INSTALL_ROOT'] = self.prefix
+ qmake = which('qmake')
+ qmake('-r')
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/qt/btn_trigger_happy.patch b/var/spack/repos/builtin/packages/qt/btn_trigger_happy.patch
new file mode 100644
index 0000000000..e6a27d5fab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qt/btn_trigger_happy.patch
@@ -0,0 +1,17 @@
+--- a/qtgamepad/src/plugins/gamepads/evdev/qevdevgamepadbackend.cpp 2016-08-08 11:34:44.517184658 -0500
++++ b/qtgamepad/src/plugins/gamepads/evdev/qevdevgamepadbackend.cpp 2016-08-08 11:36:42.371995567 -0500
+@@ -262,10 +262,10 @@
+ m_buttonsMap[BTN_TR2] = QGamepadManager::ButtonR2;
+ m_buttonsMap[BTN_THUMB] = m_buttonsMap[BTN_THUMBL] = QGamepadManager::ButtonL3;
+ m_buttonsMap[BTN_THUMBR] = QGamepadManager::ButtonR3;
+- m_buttonsMap[BTN_TRIGGER_HAPPY1] = QGamepadManager::ButtonLeft;
+- m_buttonsMap[BTN_TRIGGER_HAPPY2] = QGamepadManager::ButtonRight;
+- m_buttonsMap[BTN_TRIGGER_HAPPY3] = QGamepadManager::ButtonUp;
+- m_buttonsMap[BTN_TRIGGER_HAPPY4] = QGamepadManager::ButtonDown;
++ m_buttonsMap[0x2c0] = QGamepadManager::ButtonLeft;
++ m_buttonsMap[0x2c1] = QGamepadManager::ButtonRight;
++ m_buttonsMap[0x2c2] = QGamepadManager::ButtonUp;
++ m_buttonsMap[0x2c3] = QGamepadManager::ButtonDown;
+
+ if (m_productId)
+ m_backend->saveSettings(m_productId, QVariant());
diff --git a/var/spack/repos/builtin/packages/qt/package.py b/var/spack/repos/builtin/packages/qt/package.py
index 0c91a5ce87..95698018fa 100644
--- a/var/spack/repos/builtin/packages/qt/package.py
+++ b/var/spack/repos/builtin/packages/qt/package.py
@@ -24,11 +24,17 @@
##############################################################################
from spack import *
import os
+import sys
+
class Qt(Package):
"""Qt is a comprehensive cross-platform C++ application framework."""
homepage = 'http://qt.io'
+ url = 'http://download.qt.io/archive/qt/5.7/5.7.0/single/qt-everywhere-opensource-src-5.7.0.tar.gz'
+ list_url = 'http://download.qt.io/archive/qt/'
+ list_depth = 4
+ version('5.7.0', '9a46cce61fc64c20c3ac0a0e0fa41b42')
version('5.5.1', '59f0216819152b77536cf660b015d784')
version('5.4.2', 'fa1c4d819b401b267eb246a543a63ea5')
version('5.4.0', 'e8654e4b37dd98039ba20da7a53877e6')
@@ -37,91 +43,123 @@ class Qt(Package):
version('4.8.6', '2edbe4d6c2eff33ef91732602f3518eb')
version('3.3.8b', '9f05b4125cfe477cc52c9742c3c09009')
- # Add patch for compile issues with qt3 found with use in the OpenSpeedShop project
+ # Add patch for compile issues with qt3 found with use in the
+ # OpenSpeedShop project
variant('krellpatch', default=False, description="Build with openspeedshop based patch.")
variant('mesa', default=False, description="Depend on mesa.")
variant('gtk', default=False, description="Build with gtkplus.")
+ variant('webkit', default=False, description="Build the Webkit extension")
+ variant('examples', default=False, description="Build examples.")
+ variant('dbus', default=False, description="Build with D-Bus support.")
+ variant('phonon', default=False, description="Build with phonon support.")
patch('qt3krell.patch', when='@3.3.8b+krellpatch')
- # Use system openssl for security.
- #depends_on("openssl")
+ # https://github.com/xboxdrv/xboxdrv/issues/188
+ patch('btn_trigger_happy.patch', when='@5.7.0:')
+
+ patch('qt4-corewlan-new-osx.patch', when='@4')
+ patch('qt4-pcre-include-conflict.patch', when='@4')
+ patch('qt4-el-capitan.patch', when='@4')
- depends_on("glib")
+ # Use system openssl for security.
+ depends_on("openssl")
+ depends_on("glib", when='@4:')
depends_on("gtkplus", when='+gtk')
depends_on("libxml2")
depends_on("zlib")
- depends_on("dbus", when='@4:')
+ depends_on("dbus", when='@4:+dbus')
depends_on("libtiff")
- depends_on("libpng@1.2.56", when='@3')
+ depends_on("libpng@1.2.57", when='@3')
depends_on("libpng", when='@4:')
depends_on("libmng")
depends_on("jpeg")
-
- # Webkit
- # depends_on("gperf")
- # depends_on("flex")
- # depends_on("bison")
- # depends_on("ruby")
- # depends_on("icu4c")
+ depends_on("icu4c")
# OpenGL hardware acceleration
depends_on("mesa", when='@4:+mesa')
- depends_on("libxcb")
+ depends_on("libxcb", when=sys.platform != 'darwin')
+
+ # Webkit
+ depends_on("flex", when='+webkit', type='build')
+ depends_on("bison", when='+webkit', type='build')
+ depends_on("gperf", when='+webkit')
+ depends_on("fontconfig", when='+webkit')
+
+ # Multimedia
+ # depends_on("gstreamer", when='+multimedia')
+ # depends_on("pulse", when='+multimedia')
+ # depends_on("flac", when='+multimedia')
+ # depends_on("ogg", when='+multimedia')
+ use_xcode = True
def url_for_version(self, version):
- url = "http://download.qt.io/archive/qt/"
+ # URL keeps getting more complicated with every release
+ url = self.list_url
+
+ if version >= Version('4.0'):
+ url += version.up_to(2) + '/'
+ else:
+ url += version.up_to(1) + '/'
+
+ if version >= Version('4.8'):
+ url += str(version) + '/'
if version >= Version('5'):
- url += "%s/%s/single/qt-everywhere-opensource-src-%s.tar.gz" % \
- (version.up_to(2), version, version)
- elif version >= Version('4.8'):
- url += "%s/%s/qt-everywhere-opensource-src-%s.tar.gz" % \
- (version.up_to(2), version, version)
- elif version >= Version('4.6'):
- url += "%s/qt-everywhere-opensource-src-%s.tar.gz" % \
- (version.up_to(2), version)
- elif version >= Version('4.0'):
- url += "%s/qt-x11-opensource-src-%s.tar.gz" % \
- (version.up_to(2), version)
- elif version >= Version('3'):
- url += "%s/qt-x11-free-%s.tar.gz" % \
- (version.up_to(1), version)
+ url += 'single/'
+
+ url += 'qt-'
+
+ if version >= Version('4.6'):
+ url += 'everywhere-'
elif version >= Version('2.1'):
- url += "%s/qt-x11-%s.tar.gz" % \
- (version.up_to(1), version)
- else:
- url += "%s/qt-%s.tar.gz" % \
- (version.up_to(1), version)
+ url += 'x11-'
- return url
+ if version >= Version('4.0'):
+ url += 'opensource-src-'
+ elif version >= Version('3'):
+ url += 'free-'
+ url += str(version) + '.tar.gz'
- def setup_environment(self, spack_env, env):
- env.set('QTDIR', self.prefix)
+ return url
+ def setup_environment(self, spack_env, run_env):
+ run_env.set('QTDIR', self.prefix)
def setup_dependent_environment(self, spack_env, run_env, dspec):
spack_env.set('QTDIR', self.prefix)
+ def setup_dependent_package(self, module, ext_spec):
+ module.qmake = Executable(join_path(self.spec.prefix.bin, 'qmake'))
def patch(self):
if self.spec.satisfies('@4'):
- qmake_conf = 'mkspecs/common/g++-base.conf'
- qmake_unix_conf = 'mkspecs/common/g++-unix.conf'
- elif self.spec.satisfies('@5'):
- qmake_conf = 'qtbase/mkspecs/common/g++-base.conf'
- qmake_unix_conf = 'qtbase/mkspecs/common/g++-unix.conf'
- else:
- return
+ # Fix qmake compilers in the default mkspec
+ filter_file('^QMAKE_CC .*', 'QMAKE_CC = cc',
+ 'mkspecs/common/g++-base.conf')
+ filter_file('^QMAKE_CXX .*', 'QMAKE_CXX = c++',
+ 'mkspecs/common/g++-base.conf')
+
+ # Necessary to build with GCC 6 and other modern compilers
+ # http://stackoverflow.com/questions/10354371/
+ filter_file('(^QMAKE_CXXFLAGS .*)', r'\1 -std=gnu++98',
+ 'mkspecs/common/gcc-base.conf')
- # Fix qmake compilers in the default mkspec
- filter_file(r'^QMAKE_COMPILER *=.*$', 'QMAKE_COMPILER = cc', qmake_conf)
- filter_file(r'^QMAKE_CC *=.*$', 'QMAKE_CC = cc', qmake_conf)
- filter_file(r'^QMAKE_CXX *=.*$', 'QMAKE_CXX = c++', qmake_conf)
- filter_file(r'^QMAKE_LFLAGS_NOUNDEF *\+?=.*$', 'QMAKE_LFLAGS_NOUNDEF =', qmake_unix_conf)
+ filter_file('^QMAKE_LFLAGS_NOUNDEF .*', 'QMAKE_LFLAGS_NOUNDEF = ',
+ 'mkspecs/common/g++-unix.conf')
+ elif self.spec.satisfies('@5:'):
+ # Fix qmake compilers in the default mkspec
+ filter_file('^QMAKE_COMPILER .*', 'QMAKE_COMPILER = cc',
+ 'qtbase/mkspecs/common/g++-base.conf')
+ filter_file('^QMAKE_CC .*', 'QMAKE_CC = cc',
+ 'qtbase/mkspecs/common/g++-base.conf')
+ filter_file('^QMAKE_CXX .*', 'QMAKE_CXX = c++',
+ 'qtbase/mkspecs/common/g++-base.conf')
+ filter_file('^QMAKE_LFLAGS_NOUNDEF .*', 'QMAKE_LFLAGS_NOUNDEF = ',
+ 'qtbase/mkspecs/common/g++-unix.conf')
@property
def common_config_args(self):
@@ -134,7 +172,6 @@ class Qt(Package):
'-shared',
'-confirm-license',
'-openssl-linked',
- '-dbus-linked',
'-optimized-qmake',
'-no-openvg',
'-no-pch',
@@ -142,10 +179,45 @@ class Qt(Package):
'-no-nis'
]
- if '+gtk' in self.spec:
- config_args.append('-gtkstyle')
+ if '~examples' in self.spec:
+ config_args.extend(['-nomake', 'examples'])
+
+ if '@4' in self.spec and '~phonon' in self.spec:
+ config_args.append('-no-phonon')
+
+ if '+dbus' in self.spec:
+ config_args.append('-dbus-linked')
else:
- config_args.append('-no-gtkstyle')
+ config_args.append('-no-dbus')
+
+ if '@5:' in self.spec and sys.platform == 'darwin':
+ config_args.extend([
+ '-no-xinput2',
+ '-no-xcb-xlib',
+ '-no-pulseaudio',
+ '-no-alsa',
+ ])
+
+ if '@4' in self.spec and sys.platform == 'darwin':
+ sdkpath = which('xcrun')('--show-sdk-path',
+ # XXX(macos): 10.11 SDK fails to configure
+ '--sdk', 'macosx10.9',
+ output=str)
+ config_args.extend([
+ '-sdk', sdkpath.strip(),
+ ])
+ use_clang_platform = False
+ if self.spec.compiler.name == 'clang' and \
+                str(self.spec.compiler.version).endswith('-apple'):
+ use_clang_platform = True
+ # No one uses gcc-4.2.1 anymore; this is clang.
+ if self.spec.compiler.name == 'gcc' and \
+ str(self.spec.compiler.version) == '4.2.1':
+ use_clang_platform = True
+ if use_clang_platform:
+ config_args.extend([
+ '-platform', 'unsupported/macx-clang',
+ ])
return config_args
@@ -154,8 +226,8 @@ class Qt(Package):
@when('@3')
def configure(self):
- # An user report that this was necessary to link Qt3 on ubuntu
- os.environ['LD_LIBRARY_PATH'] = os.getcwd()+'/lib'
+ # A user reported that this was necessary to link Qt3 on ubuntu
+ os.environ['LD_LIBRARY_PATH'] = os.getcwd() + '/lib'
configure('-prefix', self.prefix,
'-v',
'-thread',
@@ -166,19 +238,37 @@ class Qt(Package):
@when('@4')
def configure(self):
configure('-fast',
- '-no-webkit',
+ '-{0}gtkstyle'.format('' if '+gtk' in self.spec else 'no-'),
+ '-{0}webkit'.format('' if '+webkit' in self.spec else 'no-'),
+ '-arch', str(self.spec.architecture.target),
*self.common_config_args)
-
- @when('@5')
+ @when('@5.0:5.6')
def configure(self):
+ webkit_args = [] if '+webkit' in self.spec else ['-skip', 'qtwebkit']
configure('-no-eglfs',
'-no-directfb',
- '-qt-xcb',
- # If someone wants to get a webkit build working, be my guest!
- '-skip', 'qtwebkit',
- *self.common_config_args)
+ '-{0}gtkstyle'.format('' if '+gtk' in self.spec else 'no-'),
+ *(webkit_args + self.common_config_args))
+
+ @when('@5.7:')
+ def configure(self):
+ config_args = self.common_config_args
+        if sys.platform != 'darwin':
+ config_args.extend([
+ '-qt-xcb',
+ ])
+
+ if '~webkit' in self.spec:
+ config_args.extend([
+ '-skip', 'webengine',
+ ])
+
+ configure('-no-eglfs',
+ '-no-directfb',
+ '-{0}gtk'.format('' if '+gtk' in self.spec else 'no-'),
+ *config_args)
def install(self, spec, prefix):
self.configure()
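
Note on the url_for_version() rewrite above: it assembles the download URL piece by piece instead of keeping one format string per version range. The standalone sketch below is not Spack code; plain strings/tuples and a local up_to() helper stand in for Spack's Version class (an assumption made purely for illustration), but the branching mirrors the method above and shows the URLs it yields.

    # Sketch only: mirrors the branching in url_for_version() above.
    # Plain strings/tuples stand in for Spack's Version objects.
    def qt_url(version_str):
        parts = version_str.split('.')
        version = tuple(int(p) for p in parts[:2] if p.isdigit())

        def up_to(n):
            return '.'.join(parts[:n])

        url = 'http://download.qt.io/archive/qt/'
        url += (up_to(2) if version >= (4, 0) else up_to(1)) + '/'
        if version >= (4, 8):
            url += version_str + '/'
        if version >= (5,):
            url += 'single/'
        url += 'qt-'
        if version >= (4, 6):
            url += 'everywhere-'
        elif version >= (2, 1):
            url += 'x11-'
        if version >= (4, 0):
            url += 'opensource-src-'
        elif version >= (3,):
            url += 'free-'
        return url + version_str + '.tar.gz'

    # qt_url('5.7.0')  -> .../qt/5.7/5.7.0/single/qt-everywhere-opensource-src-5.7.0.tar.gz
    # qt_url('3.3.8b') -> .../qt/3/qt-x11-free-3.3.8b.tar.gz
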
diff --git a/var/spack/repos/builtin/packages/qt/qt4-corewlan-new-osx.patch b/var/spack/repos/builtin/packages/qt/qt4-corewlan-new-osx.patch
new file mode 100644
index 0000000000..370edf7fa5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qt/qt4-corewlan-new-osx.patch
@@ -0,0 +1,16 @@
+diff -Nr -U5 qt-everywhere-opensource-src-4.8.6/src/plugins/bearer/corewlan/corewlan.pro qt-everywhere-opensource-src-4.8.6.corewlan-new-osx/src/plugins/bearer/corewlan/corewlan.pro
+--- qt-everywhere-opensource-src-4.8.6/src/plugins/bearer/corewlan/corewlan.pro 2014-04-10 14:37:12.000000000 -0400
++++ qt-everywhere-opensource-src-4.8.6.corewlan-new-osx/src/plugins/bearer/corewlan/corewlan.pro 2015-12-02 12:21:34.608585392 -0500
+@@ -3,11 +3,11 @@
+
+ QT = core network
+ LIBS += -framework Foundation -framework SystemConfiguration
+
+ contains(QT_CONFIG, corewlan) {
+- isEmpty(QMAKE_MAC_SDK)|contains(QMAKE_MAC_SDK, "/Developer/SDKs/MacOSX10\.[67]\.sdk") {
++ isEmpty(QMAKE_MAC_SDK)|contains(QMAKE_MAC_SDK, ".*MacOSX10\.([6789]|1[01])\.sdk") {
+ LIBS += -framework CoreWLAN -framework Security
+ }
+ }
+
+ HEADERS += qcorewlanengine.h \
diff --git a/var/spack/repos/builtin/packages/qt/qt4-el-capitan.patch b/var/spack/repos/builtin/packages/qt/qt4-el-capitan.patch
new file mode 100644
index 0000000000..35f154d3b0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qt/qt4-el-capitan.patch
@@ -0,0 +1,31 @@
+From 27aa46933bb32a88c310fe5918a49a3f34d65dfe Mon Sep 17 00:00:00 2001
+From: Mike McQuaid <mike@mikemcquaid.com>
+Date: Sun, 13 Sep 2015 11:55:59 +0100
+Subject: [PATCH] Fix El Capitan build.
+
+---
+ src/gui/painting/qpaintengine_mac.cpp | 8 +-------
+ 1 file changed, 1 insertion(+), 7 deletions(-)
+
+diff --git a/src/gui/painting/qpaintengine_mac.cpp b/src/gui/painting/qpaintengine_mac.cpp
+index 4aa0668..63b646d 100644
+--- a/src/gui/painting/qpaintengine_mac.cpp
++++ b/src/gui/painting/qpaintengine_mac.cpp
+@@ -340,13 +340,7 @@ CGColorSpaceRef QCoreGraphicsPaintEngine::macDisplayColorSpace(const QWidget *wi
+ }
+
+ // Get the color space from the display profile.
+- CGColorSpaceRef colorSpace = 0;
+- CMProfileRef displayProfile = 0;
+- CMError err = CMGetProfileByAVID((CMDisplayIDType)displayID, &displayProfile);
+- if (err == noErr) {
+- colorSpace = CGColorSpaceCreateWithPlatformColorSpace(displayProfile);
+- CMCloseProfile(displayProfile);
+- }
++ CGColorSpaceRef colorSpace = CGDisplayCopyColorSpace(displayID);
+
+ // Fallback: use generic DeviceRGB
+ if (colorSpace == 0)
+--
+2.3.8 (Apple Git-58)
+
diff --git a/var/spack/repos/builtin/packages/qt/qt4-pcre-include-conflict.patch b/var/spack/repos/builtin/packages/qt/qt4-pcre-include-conflict.patch
new file mode 100644
index 0000000000..854e564bfb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/qt/qt4-pcre-include-conflict.patch
@@ -0,0 +1,16 @@
+diff -U5 -Nru qt-everywhere-opensource-src-4.8.6/src/3rdparty/javascriptcore/JavaScriptCore/yarr/RegexJIT.h qt-everywhere-opensource-src-4.8.6.pcre/src/3rdparty/javascriptcore/JavaScriptCore/yarr/RegexJIT.h
+--- qt-everywhere-opensource-src-4.8.6/src/3rdparty/javascriptcore/JavaScriptCore/yarr/RegexJIT.h 2014-04-10 14:37:12.000000000 -0400
++++ qt-everywhere-opensource-src-4.8.6.pcre/src/3rdparty/javascriptcore/JavaScriptCore/yarr/RegexJIT.h 2016-08-29 15:30:02.216546252 -0400
+@@ -32,11 +32,11 @@
+
+ #include "MacroAssembler.h"
+ #include "RegexPattern.h"
+ #include <UString.h>
+
+-#include <pcre.h>
++#include <pcre/pcre.h>
+ struct JSRegExp; // temporary, remove when fallback is removed.
+
+ #if CPU(X86) && !COMPILER(MSVC)
+ #define YARR_CALL __attribute__ ((regparm (3)))
+ #else
diff --git a/var/spack/repos/builtin/packages/qthreads/ldflags.patch b/var/spack/repos/builtin/packages/qthreads/ldflags.patch
deleted file mode 100644
index 0c15eab386..0000000000
--- a/var/spack/repos/builtin/packages/qthreads/ldflags.patch
+++ /dev/null
@@ -1,11 +0,0 @@
---- a/configure
-+++ b/configure
-@@ -40456,7 +40456,7 @@
- hwloc_saved_LDFLAGS="$LDFLAGS"
- if test "x$with_hwloc" != x; then
- CPPFLAGS="-I$with_hwloc/include $CPPFLAGS"
-- LDFLAGS="-L$with_hwloc/lib $CPPFLAGS"
-+ LDFLAGS="-L$with_hwloc/lib $LDFLAGS"
- fi
-
-
diff --git a/var/spack/repos/builtin/packages/qthreads/package.py b/var/spack/repos/builtin/packages/qthreads/package.py
index 47b5706063..aa2f0ac67a 100644
--- a/var/spack/repos/builtin/packages/qthreads/package.py
+++ b/var/spack/repos/builtin/packages/qthreads/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Qthreads(Package):
"""The qthreads API is designed to make using large numbers of
threads convenient and easy, and to allow portable access to
@@ -36,16 +37,20 @@ class Qthreads(Package):
either full or empty, and a thread can wait for any word to
attain either state."""
homepage = "http://www.cs.sandia.gov/qthreads/"
- url = "https://qthreads.googlecode.com/files/qthread-1.10.tar.bz2"
- version('1.10', '5af8c8bbe88c2a6d45361643780d1671')
+ url = "https://github.com/Qthreads/qthreads/releases/download/1.10/qthread-1.10.tar.bz2"
+ version("1.11", "68b5f9a41cfd1a2ac112cc4db0612326")
+ version("1.10", "d1cf3cf3f30586921359f7840171e551")
+
+ patch("restrict.patch", when="@:1.10")
+ patch("trap.patch", when="@:1.10")
- patch("ldflags.patch")
- patch("restrict.patch")
- patch("trap.patch")
+ depends_on("hwloc")
def install(self, spec, prefix):
configure("--prefix=%s" % prefix,
- "--enable-guard-pages")
+ "--enable-guard-pages",
+ "--with-topology=hwloc",
+ "--with-hwloc=%s" % spec["hwloc"].prefix)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/r-abind/package.py b/var/spack/repos/builtin/packages/r-abind/package.py
index 34a1eee79f..81fa319a90 100644
--- a/var/spack/repos/builtin/packages/r-abind/package.py
+++ b/var/spack/repos/builtin/packages/r-abind/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class RAbind(Package):
+
+class RAbind(RPackage):
"""Combine multidimensional arrays into a single array. This is a
generalization of 'cbind' and 'rbind'. Works with vectors, matrices, and
higher-dimensional arrays. Also provides functions 'adrop', 'asub', and
@@ -32,11 +33,6 @@ class RAbind(Package):
homepage = "https://cran.r-project.org/"
url = "https://cran.r-project.org/src/contrib/abind_1.4-3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/abind"
- version('1.4-3', '10fcf80c677b991bf263d38be35a1fc5', expand=False)
-
- extends('R')
-
- def install(self, spec, prefix):
-
- R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
+ version('1.4-3', '10fcf80c677b991bf263d38be35a1fc5')
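
The r-abind conversion (and the similar R package changes below) drops the per-package install() because the new RPackage base class is expected to centralize the R CMD INSTALL step that every CRAN-style recipe used to repeat. The following is a rough, hypothetical sketch of that shared step, reconstructed only from the removed code above; the actual base class is defined elsewhere in the tree and is not shown in this diff.

    # Hypothetical sketch, not the real Spack base class: it reuses the names
    # (R, extends, r_lib_dir, archive_file) from the install() removed above.
    from spack import *


    class RPackageSketch(Package):
        """Stub showing the shared install step CRAN-style packages inherit."""

        extends('R')

        def install(self, spec, prefix):
            R('CMD', 'INSTALL',
              '--library=%s' % self.module.r_lib_dir,
              self.stage.archive_file)
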
diff --git a/var/spack/repos/builtin/packages/r-assertthat/package.py b/var/spack/repos/builtin/packages/r-assertthat/package.py
new file mode 100644
index 0000000000..97c29b4a99
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-assertthat/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RAssertthat(RPackage):
+ """assertthat is an extension to stopifnot() that makes it easy to declare
+ the pre and post conditions that you code should satisfy, while also
+    the pre and post conditions that your code should satisfy, while also
+ wrong."""
+
+ homepage = "https://cran.r-project.org/web/packages/assertthat/index.html"
+ url = "https://cran.r-project.org/src/contrib/assertthat_0.1.tar.gz"
+
+ version('0.1', '59f9d7f7c00077ea54d763b78eeb5798')
diff --git a/var/spack/repos/builtin/packages/r-base64enc/package.py b/var/spack/repos/builtin/packages/r-base64enc/package.py
new file mode 100644
index 0000000000..698e27a29e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-base64enc/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RBase64enc(RPackage):
+ """This package provides tools for handling base64 encoding. It is more
+ flexible than the orphaned base64 package."""
+
+ homepage = "http://www.rforge.net/base64enc"
+ url = "https://cran.r-project.org/src/contrib/base64enc_0.1-3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/base64enc"
+
+ version('0.1-3', '0f476dacdd11a3e0ad56d13f5bc2f190')
diff --git a/var/spack/repos/builtin/packages/r-bh/package.py b/var/spack/repos/builtin/packages/r-bh/package.py
new file mode 100644
index 0000000000..683ba24d86
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-bh/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RBh(RPackage):
+ """Boost provides free peer-reviewed portable C++ source libraries. A large
+ part of Boost is provided as C++ template code which is resolved entirely
+ at compile-time without linking. This package aims to provide the most
+    useful subset of Boost libraries for template use among CRAN packages. By
+ placing these libraries in this package, we offer a more efficient
+ distribution system for CRAN as replication of this code in the sources of
+ other packages is avoided. As of release 1.60.0-2, the following Boost
+ libraries are included: 'algorithm' 'any' 'bimap' 'bind' 'circular_buffer'
+ 'concept' 'config' 'container' 'date'_'time' 'detail' 'dynamic_bitset'
+ 'exception' 'filesystem' 'flyweight' 'foreach' 'functional' 'fusion'
+ 'geometry' 'graph' 'heap' 'icl' 'integer' 'interprocess' 'intrusive' 'io'
+    'iostreams' 'iterator' 'math' 'move' 'mpl' 'multiprecision' 'numeric'
+ 'pending' 'phoenix' 'preprocessor' 'random' 'range' 'smart_ptr' 'spirit'
+    'tuple' 'type_traits' 'typeof' 'unordered' 'utility' 'uuid'."""
+
+ homepage = "https://cran.r-project.org/web/packages/BH/index.html"
+ url = "https://cran.r-project.org/src/contrib/BH_1.60.0-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/BH"
+
+ version('1.60.0-2', 'b50fdc85285da05add4e9da664a2d551')
diff --git a/var/spack/repos/builtin/packages/r-BiocGenerics/package.py b/var/spack/repos/builtin/packages/r-biocgenerics/package.py
index 0d3b6a3e57..654e7f1b2a 100644
--- a/var/spack/repos/builtin/packages/r-BiocGenerics/package.py
+++ b/var/spack/repos/builtin/packages/r-biocgenerics/package.py
@@ -24,15 +24,17 @@
##############################################################################
from spack import *
-class RBiocgenerics(Package):
- """S4 generic functions needed by many Bioconductor packages."""
-
- homepage = 'https://www.bioconductor.org/packages/release/bioc/html/BiocGenerics.html'
- url = "https://www.bioconductor.org/packages/release/bioc/src/contrib/BiocGenerics_0.16.1.tar.gz"
- version('0.16.1', 'c2148ffd86fc6f1f819c7f68eb2c744f', expand=False)
+class RBiocgenerics(RPackage):
+ """S4 generic functions needed by many Bioconductor packages."""
- extends('R')
+ homepage = 'https://bioconductor.org/packages/BiocGenerics/'
+ version('3.3',
+ git='https://github.com/Bioconductor-mirror/BiocGenerics.git',
+ branch='release-3.3')
+ version('3.2',
+ git='https://github.com/Bioconductor-mirror/BiocGenerics.git',
+ branch='release-3.2')
- def install(self, spec, prefix):
- R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
+ depends_on('r@3.3.0:3.3.9', when='@3.3')
+ depends_on('r@3.2.0:3.2.9', when='@3.2')
diff --git a/var/spack/repos/builtin/packages/r-bitops/package.py b/var/spack/repos/builtin/packages/r-bitops/package.py
new file mode 100644
index 0000000000..67bb0fe777
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-bitops/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RBitops(RPackage):
+ """Functions for bitwise operations on integer vectors."""
+
+ homepage = "https://cran.r-project.org/package=bitops"
+ url = "https://cran.r-project.org/src/contrib/bitops_1.0-6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/bitops"
+
+ version('1.0-6', 'fba16485a51b1ccd354abde5816b6bdd')
diff --git a/var/spack/repos/builtin/packages/r-boot/package.py b/var/spack/repos/builtin/packages/r-boot/package.py
new file mode 100644
index 0000000000..1361920673
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-boot/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RBoot(RPackage):
+ """Functions and datasets for bootstrapping from the book "Bootstrap
+ Methods and Their Application" by A. C. Davison and D. V. Hinkley (1997,
+ CUP), originally written by Angelo Canty for S."""
+
+ homepage = "https://cran.r-project.org/package=boot"
+ url = "https://cran.r-project.org/src/contrib/boot_1.3-18.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/boot"
+
+ version('1.3-18', '711dd58af14e1027eb8377d9202e9b6f')
diff --git a/var/spack/repos/builtin/packages/r-brew/package.py b/var/spack/repos/builtin/packages/r-brew/package.py
new file mode 100644
index 0000000000..558d830a2b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-brew/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RBrew(RPackage):
+ """brew implements a templating framework for mixing text and R code for
+ report generation. brew template syntax is similar to PHP, Ruby's erb
+ module, Java Server Pages, and Python's psp module."""
+
+ homepage = "https://cran.r-project.org/package=brew"
+ url = "https://cran.r-project.org/src/contrib/brew_1.0-6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/brew"
+
+ version('1.0-6', '4aaca5e6ec145e0fc0fe6375ce1f3806')
diff --git a/var/spack/repos/builtin/packages/r-c50/package.py b/var/spack/repos/builtin/packages/r-c50/package.py
new file mode 100644
index 0000000000..571f8f461b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-c50/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RC50(RPackage):
+ """C5.0 decision trees and rule-based models for pattern recognition."""
+
+ homepage = "https://cran.r-project.org/package=C50"
+ url = "https://cran.r-project.org/src/contrib/C50_0.1.0-24.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/C50"
+
+ version('0.1.0-24', '42631e65c5c579532cc6edf5ea175949')
+
+    depends_on('r-partykit', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-car/package.py b/var/spack/repos/builtin/packages/r-car/package.py
new file mode 100644
index 0000000000..80a0206a8a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-car/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RCar(RPackage):
+ """Functions and Datasets to Accompany J. Fox and S. Weisberg, An R
+ Companion to Applied Regression, Second Edition, Sage, 2011."""
+
+ homepage = "https://r-forge.r-project.org/projects/car/"
+ url = "https://cran.r-project.org/src/contrib/car_2.1-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/car"
+
+ version('2.1-2', '0f78ad74ef7130126d319acec23951a0')
+
+    depends_on('r-mass', type=('build', 'run'))
+    depends_on('r-mgcv', type=('build', 'run'))
+    depends_on('r-nnet', type=('build', 'run'))
+    depends_on('r-pbkrtest', type=('build', 'run'))
+    depends_on('r-quantreg', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-caret/package.py b/var/spack/repos/builtin/packages/r-caret/package.py
new file mode 100644
index 0000000000..d795cfa204
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-caret/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RCaret(RPackage):
+ """Misc functions for training and plotting classification and regression
+ models."""
+
+ homepage = "https://github.com/topepo/caret/"
+ url = "https://cran.r-project.org/src/contrib/caret_6.0-70.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/caret"
+
+ version('6.0-70', '202d7abb6a679af716ea69fb2573f108')
+
+    depends_on('r-lattice', type=('build', 'run'))
+    depends_on('r-ggplot2', type=('build', 'run'))
+    depends_on('r-car', type=('build', 'run'))
+    depends_on('r-foreach', type=('build', 'run'))
+    depends_on('r-plyr', type=('build', 'run'))
+    depends_on('r-nlme', type=('build', 'run'))
+    depends_on('r-reshape2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-catools/package.py b/var/spack/repos/builtin/packages/r-catools/package.py
new file mode 100644
index 0000000000..7b82a19c01
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-catools/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RCatools(RPackage):
+ """Contains several basic utility functions including: moving (rolling,
+ running) window statistic functions, read/write for GIF and ENVI binary
+ files, fast calculation of AUC, LogitBoost classifier, base64
+ encoder/decoder, round-off-error-free sum and cumsum, etc."""
+
+ homepage = "https://cran.r-project.org/package=caTools"
+ url = "https://cran.r-project.org/src/contrib/caTools_1.17.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/caTools"
+
+ version('1.17.1', '5c872bbc78b177b306f36709deb44498')
+
+    depends_on('r-bitops', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-chron/package.py b/var/spack/repos/builtin/packages/r-chron/package.py
new file mode 100644
index 0000000000..e1731424b3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-chron/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RChron(RPackage):
+ """Chronological objects which can handle dates and times."""
+
+ homepage = "https://cran.r-project.org/package=chron"
+ url = "https://cran.r-project.org/src/contrib/chron_2.3-47.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/chron"
+
+ version('2.3-47', 'b8890cdc5f2337f8fd775b0becdcdd1f')
diff --git a/var/spack/repos/builtin/packages/r-class/package.py b/var/spack/repos/builtin/packages/r-class/package.py
new file mode 100644
index 0000000000..de81e7588e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-class/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RClass(RPackage):
+ """Various functions for classification, including k-nearest neighbour,
+ Learning Vector Quantization and Self-Organizing Maps."""
+
+ homepage = "http://www.stats.ox.ac.uk/pub/MASS4/"
+ url = "https://cran.r-project.org/src/contrib/class_7.3-14.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/class"
+
+ version('7.3-14', '6a21dd206fe4ea29c55faeb65fb2b71e')
+
+    depends_on('r-mass', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-cluster/package.py b/var/spack/repos/builtin/packages/r-cluster/package.py
new file mode 100644
index 0000000000..29e16c2271
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-cluster/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RCluster(RPackage):
+ """Methods for Cluster analysis. Much extended the original from Peter
+ Rousseeuw, Anja Struyf and Mia Hubert, based on Kaufman and Rousseeuw
+ (1990) "Finding Groups in Data"."""
+
+ homepage = "https://cran.r-project.org/web/packages/cluster/index.html"
+ url = "https://cran.r-project.org/src/contrib/cluster_2.0.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/cluster"
+
+ version('2.0.4', 'bb4deceaafb1c42bb1278d5d0dc11e59')
diff --git a/var/spack/repos/builtin/packages/r-codetools/package.py b/var/spack/repos/builtin/packages/r-codetools/package.py
new file mode 100644
index 0000000000..39186bf54a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-codetools/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RCodetools(RPackage):
+ """Code analysis tools for R."""
+
+ homepage = "https://cran.r-project.org/web/packages/codetools/index.html"
+ url = "https://cran.r-project.org/src/contrib/codetools_0.2-14.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/codetools"
+
+ version('0.2-14', '7ec41d4f8bd6ba85facc8c5e6adc1f4d')
diff --git a/var/spack/repos/builtin/packages/r-colorspace/package.py b/var/spack/repos/builtin/packages/r-colorspace/package.py
new file mode 100644
index 0000000000..b7561ea360
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-colorspace/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RColorspace(RPackage):
+ """Carries out mapping between assorted color spaces including RGB, HSV,
+ HLS, CIEXYZ, CIELUV, HCL (polar CIELUV), CIELAB and polar CIELAB.
+ Qualitative, sequential, and diverging color palettes based on HCL colors
+ are provided."""
+
+ homepage = "https://cran.r-project.org/web/packages/colorspace/index.html"
+ url = "https://cran.r-project.org/src/contrib/colorspace_1.2-6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/colorspace"
+
+ version('1.2-6', 'a30191e9caf66f77ff4e99c062e9dce1')
diff --git a/var/spack/repos/builtin/packages/r-crayon/package.py b/var/spack/repos/builtin/packages/r-crayon/package.py
new file mode 100644
index 0000000000..2002ea5419
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-crayon/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RCrayon(RPackage):
+ """Colored terminal output on terminals that support 'ANSI' color and
+ highlight codes. It also works in 'Emacs' 'ESS'. 'ANSI' color support is
+ automatically detected. Colors and highlighting can be combined and nested.
+ New styles can also be created easily. This package was inspired by the
+ 'chalk' 'JavaScript' project."""
+
+ homepage = "https://github.com/gaborcsardi/crayon"
+ url = "https://cran.r-project.org/src/contrib/crayon_1.3.2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/crayon"
+
+ version('1.3.2', 'fe29c6204d2d6ff4c2f9d107a03d0cb9')
diff --git a/var/spack/repos/builtin/packages/r-cubature/package.py b/var/spack/repos/builtin/packages/r-cubature/package.py
new file mode 100644
index 0000000000..918f8e9e3d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-cubature/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RCubature(RPackage):
+    """Adaptive multivariate integration over hypercubes."""
+
+ homepage = "https://cran.r-project.org/package=cubature"
+ url = "https://cran.r-project.org/src/contrib/cubature_1.1-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/cubature"
+
+ version('1.1-2', '5617e1d82baa803a3814d92461da45c9')
diff --git a/var/spack/repos/builtin/packages/r-curl/package.py b/var/spack/repos/builtin/packages/r-curl/package.py
new file mode 100644
index 0000000000..7b62d1be60
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-curl/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RCurl(RPackage):
+ """The curl() and curl_download() functions provide highly configurable
+ drop-in replacements for base url() and download.file() with better
+ performance, support for encryption (https, ftps), gzip compression,
+ authentication, and other libcurl goodies. The core of the package
+ implements a framework for performing fully customized requests where data
+ can be processed either in memory, on disk, or streaming via the callback
+ or connection interfaces. Some knowledge of libcurl is recommended; for a
+ more-user-friendly web client see the 'httr' package which builds on this
+ package with http specific tools and logic."""
+
+ homepage = "https://github.com/jeroenooms/curl"
+ url = "https://cran.r-project.org/src/contrib/curl_0.9.7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/curl"
+
+ version('1.0', '93d34926d6071e1fba7e728b482f0dd9')
+ version('0.9.7', 'a101f7de948cb828fef571c730f39217')
+
+ depends_on('curl')
diff --git a/var/spack/repos/builtin/packages/r-datatable/package.py b/var/spack/repos/builtin/packages/r-datatable/package.py
new file mode 100644
index 0000000000..23802524e8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-datatable/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDatatable(RPackage):
+ """Fast aggregation of large data (e.g. 100GB in RAM), fast ordered joins,
+ fast add/modify/delete of columns by group using no copies at all, list
+ columns and a fast file reader (fread). Offers a natural and flexible
+ syntax, for faster development."""
+
+ homepage = "https://github.com/Rdatatable/data.table/wiki"
+ url = "https://cran.r-project.org/src/contrib/data.table_1.9.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/data.table"
+
+ version('1.9.6', 'b1c0c7cce490bdf42ab288541cc55372')
+
+ depends_on('r-chron', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-dbi/package.py b/var/spack/repos/builtin/packages/r-dbi/package.py
new file mode 100644
index 0000000000..f00100bdf0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-dbi/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDbi(RPackage):
+ """A database interface definition for communication between R and
+ relational database management systems. All classes in this package are
+ virtual and need to be extended by the various R/DBMS implementations."""
+
+ homepage = "https://github.com/rstats-db/DBI"
+ url = "https://cran.r-project.org/src/contrib/DBI_0.4-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/DBI"
+
+ version('0.4-1', 'c7ee8f1c5037c2284e99c62698d0f087')
diff --git a/var/spack/repos/builtin/packages/r-deoptim/package.py b/var/spack/repos/builtin/packages/r-deoptim/package.py
new file mode 100644
index 0000000000..5334953d46
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-deoptim/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RDeoptim(RPackage):
+ """Implements the differential evolution algorithm for global optimization
+ of a real-valued function of a real-valued parameter vector."""
+
+ homepage = "https://cran.r-project.org/package=DEoptim"
+ url = "https://cran.r-project.org/src/contrib/DEoptim_2.2-3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/DEoptim"
+
+ version('2.2-3', 'ed406e6790f8f1568aa9bec159f80326')
diff --git a/var/spack/repos/builtin/packages/r-devtools/package.py b/var/spack/repos/builtin/packages/r-devtools/package.py
new file mode 100644
index 0000000000..9895a3b390
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-devtools/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDevtools(RPackage):
+ """Collection of package development tools."""
+
+ homepage = "https://github.com/hadley/devtools"
+ url = "https://cran.r-project.org/src/contrib/devtools_1.11.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/devtools"
+
+ version('1.11.1', '242672ee27d24dddcbdaac88c586b6c2')
+
+ depends_on('r-httr', type=('build', 'run'))
+ depends_on('r-memoise', type=('build', 'run'))
+ depends_on('r-whisker', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-rstudioapi', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-git2r', type=('build', 'run'))
+ depends_on('r-withr', type=('build', 'run'))
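
Each R dependency above is tagged type=('build', 'run'), meaning the package must be present both while r-devtools is being installed and whenever it is loaded afterwards. As a hedged aside (not part of this changeset), the sketch below uses hypothetical names 'r-foo', 'r-bar' and 'libbar' to contrast the dependency types Spack distinguishes:

from spack import *


class RExample(RPackage):
    """Hypothetical recipe illustrating Spack dependency types."""

    homepage = "https://example.org"
    url = "https://example.org/example_1.0.tar.gz"

    version('1.0', '00000000000000000000000000000000')  # placeholder md5

    # Needed to install the package and to load it from R afterwards.
    depends_on('r-foo', type=('build', 'run'))
    # Needed only while building (e.g. a build-time tool).
    depends_on('r-bar', type='build')
    # Compiled against and linked into the package's shared objects.
    depends_on('libbar', type=('build', 'link'))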
diff --git a/var/spack/repos/builtin/packages/r-diagrammer/package.py b/var/spack/repos/builtin/packages/r-diagrammer/package.py
new file mode 100644
index 0000000000..5f8e27a102
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-diagrammer/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDiagrammer(RPackage):
+ """Create graph diagrams and flowcharts using R."""
+
+ homepage = "https://github.com/rich-iannone/DiagrammeR"
+ url = "https://cran.r-project.org/src/contrib/DiagrammeR_0.8.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/DiagrammeR"
+
+ version('0.8.4', '9ee295c744f5d4ba9a84289ca7bdaf1a')
+
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-igraph', type=('build', 'run'))
+ depends_on('r-influencer', type=('build', 'run'))
+ depends_on('r-rstudioapi@0.6:', type=('build', 'run'))
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-visnetwork', type=('build', 'run'))
+ depends_on('r-scales', type=('build', 'run'))
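
The r-rstudioapi constraint above uses Spack's version-range syntax: '@0.6:' admits version 0.6 or anything newer. A brief, hedged illustration with a hypothetical dependency 'r-foo' (only the first form appears in this changeset):

depends_on('r-foo@0.6:')     # 0.6 or any later version
depends_on('r-foo@:0.6')     # any version up to and including 0.6
depends_on('r-foo@0.5:0.7')  # inclusive range from 0.5 to 0.7
depends_on('r-foo@0.6.1')    # exactly version 0.6.1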
diff --git a/var/spack/repos/builtin/packages/r-dichromat/package.py b/var/spack/repos/builtin/packages/r-dichromat/package.py
new file mode 100644
index 0000000000..ea465e2d6c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-dichromat/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDichromat(RPackage):
+ """Collapse red-green or green-blue distinctions to simulate the effects of
+ different types of color-blindness."""
+
+ homepage = "https://cran.r-project.org/web/packages/dichromat/index.html"
+ url = "https://cran.r-project.org/src/contrib/dichromat_2.0-0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/dichromat"
+
+ version('2.0-0', '84e194ac95a69763d740947a7ee346a6')
diff --git a/var/spack/repos/builtin/packages/r-digest/package.py b/var/spack/repos/builtin/packages/r-digest/package.py
new file mode 100644
index 0000000000..7e077442f9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-digest/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDigest(RPackage):
+ """Implementation of a function 'digest()' for the creation of hash digests
+ of arbitrary R objects (using the md5, sha-1, sha-256, crc32, xxhash and
+ murmurhash algorithms) permitting easy comparison of R language objects, as
+ well as a function 'hmac()' to create hash-based message authentication
+ code. The md5 algorithm by Ron Rivest is specified in RFC 1321, the sha-1
+ and sha-256 algorithms are specified in FIPS-180-1 and FIPS-180-2, and the
+ crc32 algorithm is described in
+ ftp://ftp.rocksoft.com/cliens/rocksoft/papers/crc_v3.txt. For md5, sha-1,
+ sha-256 and aes, this package uses small standalone implementations that
+ were provided by Christophe Devine. For crc32, code from the zlib library
+ is used. For sha-512, an implementation by Aaron D. Gifford is used. For
+ xxhash, the implementation by Yann Collet is used. For murmurhash, an
+ implementation by Shane Day is used. Please note that this package is not
+ meant to be deployed for cryptographic purposes for which more
+ comprehensive (and widely tested) libraries such as OpenSSL should be
+ used."""
+
+ homepage = "http://dirk.eddelbuettel.com/code/digest.html"
+ url = "https://cran.r-project.org/src/contrib/digest_0.6.9.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/digest"
+
+ version('0.6.9', '48048ce6c466bdb124716e45ba4a0e83')
diff --git a/var/spack/repos/builtin/packages/r-doparallel/package.py b/var/spack/repos/builtin/packages/r-doparallel/package.py
new file mode 100644
index 0000000000..fa039568c6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-doparallel/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDoparallel(RPackage):
+ """Provides a parallel backend for the %dopar% function using the parallel
+ package."""
+
+ homepage = "https://cran.r-project.org/web/packages/doParallel/index.html"
+ url = "https://cran.r-project.org/src/contrib/doParallel_1.0.10.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/doParallel"
+
+ version('1.0.10', 'd9fbde8f315d98d055483ee3493c9b43')
+
+ depends_on('r-foreach', type=('build', 'run'))
+ depends_on('r-iterators', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-dplyr/package.py b/var/spack/repos/builtin/packages/r-dplyr/package.py
new file mode 100644
index 0000000000..6ffa48adc8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-dplyr/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDplyr(RPackage):
+ """A fast, consistent tool for working with data frame like objects, both
+ in memory and out of memory."""
+
+ homepage = "https://github.com/hadley/dplyr"
+ url = "https://cran.r-project.org/src/contrib/dplyr_0.5.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/dplyr"
+
+ version('0.5.0', '1fcafcacca70806eea2e6d465cdb94ef')
+
+ depends_on('r-assertthat', type=('build', 'run'))
+ depends_on('r-r6', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
+ depends_on('r-tibble', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-lazyeval', type=('build', 'run'))
+ depends_on('r-dbi', type=('build', 'run'))
+ depends_on('r-bh', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-dt/package.py b/var/spack/repos/builtin/packages/r-dt/package.py
new file mode 100644
index 0000000000..ae92f5fd24
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-dt/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDt(RPackage):
+ """Data objects in R can be rendered as HTML tables using the JavaScript
+ library 'DataTables' (typically via R Markdown or Shiny). The 'DataTables'
+ library has been included in this R package. The package name 'DT' is an
+ abbreviation of 'DataTables'."""
+
+ homepage = "http://rstudio.github.io/DT"
+ url = "https://cran.r-project.org/src/contrib/DT_0.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/DT/"
+
+ version('0.1', '5c8df984921fa484784ec4b8a4fb6f3c')
+
+ depends_on('r-htmltools', type=('build', 'run'))
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-dygraphs/package.py b/var/spack/repos/builtin/packages/r-dygraphs/package.py
new file mode 100644
index 0000000000..323fb6d584
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-dygraphs/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RDygraphs(RPackage):
+ """An R interface to the 'dygraphs' JavaScript charting library (a copy of
+ which is included in the package). Provides rich facilities for charting
+ time-series data in R, including highly configurable series- and
+ axis-display and interactive features like zoom/pan and series/point
+ highlighting."""
+
+ homepage = "https://cran.r-project.org/web/packages/dygraphs/index.html"
+ url = "https://cran.r-project.org/src/contrib/dygraphs_0.9.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/dygraphs"
+
+ version('0.9', '7f0ce4312bcd3f0a58b8c03b2772f833')
+
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-zoo', type=('build', 'run'))
+ depends_on('r-xts', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-e1071/package.py b/var/spack/repos/builtin/packages/r-e1071/package.py
new file mode 100644
index 0000000000..4d79fcccd7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-e1071/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RE1071(RPackage):
+ """Functions for latent class analysis, short time Fourier transform, fuzzy
+ clustering, support vector machines, shortest path computation, bagged
+ clustering, naive Bayes classifier, ..."""
+
+ homepage = "https://cran.r-project.org/package=e1071"
+ url = "https://cran.r-project.org/src/contrib/e1071_1.6-7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/e1071"
+
+ version('1.6-7', 'd109a7e3dd0c905d420e327a9a921f5a')
+
+ depends_on('r-class', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-evaluate/package.py b/var/spack/repos/builtin/packages/r-evaluate/package.py
new file mode 100644
index 0000000000..cf6c72dc62
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-evaluate/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+
+from spack import *
+
+
+class REvaluate(RPackage):
+ """Parsing and evaluation tools that make it easy to recreate the command
+ line behaviour of R."""
+
+ homepage = "https://github.com/hadley/evaluate"
+ url = "https://cran.rstudio.com/src/contrib/evaluate_0.9.tar.gz"
+ list_url = "https://cran.rstudio.com/src/contrib/Archive/evaluate"
+
+ version('0.9', '877d89ce8a9ef7f403b1089ca1021775')
+
+ depends_on('r-stringr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-filehash/package.py b/var/spack/repos/builtin/packages/r-filehash/package.py
index fffae68019..b17335ed11 100644
--- a/var/spack/repos/builtin/packages/r-filehash/package.py
+++ b/var/spack/repos/builtin/packages/r-filehash/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class RFilehash(Package):
+
+class RFilehash(RPackage):
"""Implements a simple key-value style database where character string keys
are associated with data values that are stored on the disk. A simple
interface is provided for inserting, retrieving, and deleting data from the
@@ -37,10 +38,6 @@ class RFilehash(Package):
homepage = 'https://cran.r-project.org/'
url = "https://cran.r-project.org/src/contrib/filehash_2.3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/filehash"
- version('2.3', '01fffafe09b148ccadc9814c103bdc2f', expand=False)
-
- extends('R')
-
- def install(self, spec, prefix):
- R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
+ version('2.3', '01fffafe09b148ccadc9814c103bdc2f')
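
This hunk is the template for the rest of the series: the old recipe subclassed Package, called extends('R') by hand and shipped its own install() that invoked R CMD INSTALL on the downloaded archive (hence expand=False), while the new recipe subclasses RPackage and inherits all of that. Below is a rough sketch only, assuming the RPackage build-system class behaves like the install() being deleted here; it is an approximation, not Spack's actual source:

from spack import *


class RPackageSketch(Package):
    """Approximation of what the RPackage base class factors out."""

    extends('R')  # every R recipe extends the R installation

    def install(self, spec, prefix):
        # Install the package into its own prefix; Spack later links that
        # prefix into an R library path so library() can find it.
        R('CMD', 'INSTALL',
          '--library=%s' % self.module.r_lib_dir,
          self.stage.source_path)

Dropping expand=False from version() fits this model: the base class can work from the expanded source tree rather than handing R the raw tarball.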
diff --git a/var/spack/repos/builtin/packages/r-foreach/package.py b/var/spack/repos/builtin/packages/r-foreach/package.py
new file mode 100644
index 0000000000..78efe02188
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-foreach/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RForeach(RPackage):
+ """Support for the foreach looping construct. Foreach is an idiom that
+ allows for iterating over elements in a collection, without the use of an
+ explicit loop counter. This package in particular is intended to be used
+ for its return value, rather than for its side effects. In that sense, it
+ is similar to the standard lapply function, but doesn't require the
+ evaluation of a function. Using foreach without side effects also
+ facilitates executing the loop in parallel."""
+
+ homepage = "https://cran.r-project.org/web/packages/foreach/index.html"
+ url = "https://cran.r-project.org/src/contrib/foreach_1.4.3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/foreach"
+
+ version('1.4.3', 'ef45768126661b259f9b8994462c49a0')
+
+ depends_on('r-codetools', type=('build', 'run'))
+ depends_on('r-iterators', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-foreign/package.py b/var/spack/repos/builtin/packages/r-foreign/package.py
new file mode 100644
index 0000000000..b293f091c2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-foreign/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RForeign(RPackage):
+ """Functions for reading and writing data stored by some versions of Epi
+ Info, Minitab, S, SAS, SPSS, Stata, Systat and Weka and for reading and
+ writing some dBase files."""
+
+ homepage = "https://cran.r-project.org/web/packages/foreign/index.html"
+ url = "https://cran.r-project.org/src/contrib/foreign_0.8-66.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/foreign"
+
+ version('0.8-66', 'ff12190f4631dca31e30ca786c2c8f62')
diff --git a/var/spack/repos/builtin/packages/r-formatr/package.py b/var/spack/repos/builtin/packages/r-formatr/package.py
new file mode 100644
index 0000000000..011111af07
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-formatr/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RFormatr(RPackage):
+ """Provides a function tidy_source() to format R source code. Spaces and
+ indent will be added to the code automatically, and comments will be
+ preserved under certain conditions, so that R code will be more
+ human-readable and tidy. There is also a Shiny app as a user interface in
+ this package."""
+
+ homepage = "http://yihui.name/formatR"
+ url = "https://cran.r-project.org/src/contrib/formatR_1.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/formatR"
+
+ version('1.4', '98b9b64b2785b35f9df403e1aab6c73c')
+
+ depends_on('r-codetools', type=('build', 'run'))
+ depends_on('r-shiny', type=('build', 'run'))
+ depends_on('r-testit', type=('build', 'run'))
+ # depends_on('r-knitr', type=('build', 'run')) - mutual dependency
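
The commented-out line records a dependency cycle: formatR and knitr each reference the other, and Spack's concretizer cannot resolve circular dependencies, so the knitr edge is dropped here. A hedged alternative (hypothetical, not part of this changeset) would be to gate the edge behind an off-by-default variant; the cycle would still have to stay broken on the knitr side:

variant('knitr', default=False,
        description='Pull in r-knitr (mutual dependency with formatR)')
depends_on('r-knitr', type=('build', 'run'), when='+knitr')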
diff --git a/var/spack/repos/builtin/packages/r-formula/package.py b/var/spack/repos/builtin/packages/r-formula/package.py
new file mode 100644
index 0000000000..5515ca91a3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-formula/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RFormula(RPackage):
+ """Infrastructure for extended formulas with multiple parts on the right-hand
+ side and/or multiple responses on the left-hand side."""
+
+ homepage = "https://cran.r-project.org/package=Formula"
+ url = "https://cran.r-project.org/src/contrib/Formula_1.2-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/Formula"
+
+ version('1.2-1', '2afb31e637cecd0c1106317aca1e4849')
diff --git a/var/spack/repos/builtin/packages/r-gdata/package.py b/var/spack/repos/builtin/packages/r-gdata/package.py
new file mode 100644
index 0000000000..ef001699bb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-gdata/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGdata(RPackage):
+ """Various R programming tools for data manipulation, including: - medical
+ unit conversions ('ConvertMedUnits', 'MedUnits'), - combining objects
+ ('bindData', 'cbindX', 'combine', 'interleave'), - character vector
+ operations ('centerText', 'startsWith', 'trim'), - factor manipulation
+ ('levels', 'reorder.factor', 'mapLevels'), - obtaining information about R
+ objects ('object.size', 'elem', 'env', 'humanReadable', 'is.what', 'll',
+ 'keep', 'ls.funs', 'Args','nPairs', 'nobs'), - manipulating MS-Excel
+ formatted files ('read.xls', 'installXLSXsupport', 'sheetCount',
+ 'xlsFormats'), - generating fixed-width format files ('write.fwf'), -
+ extricating components of date & time objects ('getYear', 'getMonth',
+ 'getDay', 'getHour', 'getMin', 'getSec'), - operations on columns of data
+ frames ('matchcols', 'rename.vars'), - matrix operations ('unmatrix',
+ 'upperTriangle', 'lowerTriangle'), - operations on vectors ('case',
+ 'unknownToNA', 'duplicated2', 'trimSum'), - operations on data frames
+ ('frameApply', 'wideByFactor'), - value of last evaluated expression
+ ('ans'), and - wrapper for 'sample' that ensures consistent behavior for
+ both scalar and vector arguments ('resample')."""
+
+ homepage = "https://cran.r-project.org/package=gdata"
+ url = "https://cran.r-project.org/src/contrib/gdata_2.17.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/gdata"
+
+ version('2.17.0', 'c716b663b9dc16ad8cafe6acc781a75f')
+
+ depends_on('r-gtools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-geosphere/package.py b/var/spack/repos/builtin/packages/r-geosphere/package.py
new file mode 100644
index 0000000000..d90594a3e0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-geosphere/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGeosphere(RPackage):
+ """Spherical trigonometry for geographic applications. That is, compute
+ distances and related measures for angular (longitude/latitude)
+ locations."""
+
+ homepage = "https://cran.r-project.org/package=geosphere"
+ url = "https://cran.r-project.org/src/contrib/geosphere_1.5-5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/geosphere"
+
+ version('1.5-5', '28efb7a8e266c7f076cdbcf642455f3e')
+
+ depends_on('r-sp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-ggmap/package.py b/var/spack/repos/builtin/packages/r-ggmap/package.py
new file mode 100644
index 0000000000..65a69553a1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-ggmap/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGgmap(RPackage):
+ """A collection of functions to visualize spatial data and models on top of
+ static maps from various online sources (e.g. Google Maps and Stamen Maps).
+ It includes tools common to those tasks, including functions for
+ geolocation and routing."""
+
+ homepage = "https://github.com/dkahle/ggmap"
+ url = "https://cran.r-project.org/src/contrib/ggmap_2.6.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/ggmap"
+
+ version('2.6.1', '25ad414a3a1c6d59a227a9f22601211a')
+
+ depends_on('r-ggplot2', type=('build', 'run'))
+ depends_on('r-proto', type=('build', 'run'))
+ depends_on('r-rgooglemaps', type=('build', 'run'))
+ depends_on('r-png', type=('build', 'run'))
+ depends_on('r-plyr', type=('build', 'run'))
+ depends_on('r-reshape2', type=('build', 'run'))
+ depends_on('r-rjson', type=('build', 'run'))
+ depends_on('r-mapproj', type=('build', 'run'))
+ depends_on('r-jpeg', type=('build', 'run'))
+ depends_on('r-geosphere', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-scales', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-ggplot2/package.py b/var/spack/repos/builtin/packages/r-ggplot2/package.py
new file mode 100644
index 0000000000..3b4c437f0c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-ggplot2/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGgplot2(RPackage):
+ """An implementation of the grammar of graphics in R. It combines the
+ advantages of both base and lattice graphics: conditioning and shared axes
+ are handled automatically, and you can still build up a plot step by step
+ from multiple data sources. It also implements a sophisticated
+ multidimensional conditioning system and a consistent interface to map data
+ to aesthetic attributes. See http://ggplot2.org for more information,
+ documentation and examples."""
+
+ homepage = "http://ggplot2.org/"
+ url = "https://cran.r-project.org/src/contrib/ggplot2_2.1.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/ggplot2"
+
+ version('2.1.0', '771928cfb97c649c720423deb3ec7fd3')
+
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-gtable', type=('build', 'run'))
+ depends_on('r-mass', type=('build', 'run'))
+ depends_on('r-plyr', type=('build', 'run'))
+ depends_on('r-reshape2', type=('build', 'run'))
+ depends_on('r-scales', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-ggvis/package.py b/var/spack/repos/builtin/packages/r-ggvis/package.py
new file mode 100644
index 0000000000..5acbff04b0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-ggvis/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGgvis(RPackage):
+ """An implementation of an interactive grammar of graphics, taking the best
+ parts of 'ggplot2', combining them with the reactive framework from 'shiny'
+ and web graphics from 'vega'."""
+
+ homepage = "http://ggvis.rstudio.com/"
+ url = "https://cran.r-project.org/src/contrib/ggvis_0.4.2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/ggvis"
+
+ version('0.4.2', '039f45e5c7f1e0652779163d7d99f922')
+
+ depends_on('r-assertthat', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-shiny', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-dplyr', type=('build', 'run'))
+ depends_on('r-lazyeval', type=('build', 'run'))
+ depends_on('r-htmltools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-gistr/package.py b/var/spack/repos/builtin/packages/r-gistr/package.py
new file mode 100644
index 0000000000..67c34cfffc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-gistr/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RGistr(RPackage):
+ """Work with 'GitHub' 'gists' from 'R'. This package allows the user to
+ create new 'gists', update 'gists' with new files, rename files, delete
+ files, get and delete 'gists', star and 'un-star' 'gists', fork 'gists',
+ open a 'gist' in your default browser, get embed code for a 'gist', list
+ 'gist' 'commits', and get rate limit information when 'authenticated'. Some
+ requests require authentication and some do not."""
+
+ homepage = "https://github.com/ropensci/gistr"
+ url = "https://cran.r-project.org/src/contrib/gistr_0.3.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/gistr"
+
+ version('0.3.6', '49d548cb3eca0e66711aece37757a2c0')
+
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-httr', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-assertthat', type=('build', 'run'))
+ depends_on('r-knitr', type=('build', 'run'))
+ depends_on('r-rmarkdown', type=('build', 'run'))
+ depends_on('r-dplyr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-git2r/package.py b/var/spack/repos/builtin/packages/r-git2r/package.py
new file mode 100644
index 0000000000..7c4ff3144b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-git2r/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGit2r(RPackage):
+ """Interface to the 'libgit2' library, which is a pure C implementation of
+ the 'Git' core methods. Provides access to 'Git' repositories to extract
+ data and run some basic 'Git' commands."""
+
+ homepage = "https://github.com/ropensci/git2r"
+ url = "https://cran.r-project.org/src/contrib/git2r_0.15.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/git2r"
+
+ version('0.15.0', '57658b3298f9b9aadc0dd77b4ef6a1e1')
+
+ depends_on('zlib')
+ depends_on('openssl')
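
Unlike the R-level dependencies elsewhere in this series, zlib and openssl are declared without an explicit type, so Spack's default dependency type applies (build and link), which is what the compiled libgit2 bindings in git2r need. A hedged, more explicit spelling that should be equivalent under those defaults:

depends_on('zlib', type=('build', 'link'))
depends_on('openssl', type=('build', 'link'))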
diff --git a/var/spack/repos/builtin/packages/r-glmnet/package.py b/var/spack/repos/builtin/packages/r-glmnet/package.py
new file mode 100644
index 0000000000..ac44d42c12
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-glmnet/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGlmnet(RPackage):
+ """Extremely efficient procedures for fitting the entire lasso or
+ elastic-net regularization path for linear regression, logistic and
+ multinomial regression models, Poisson regression and the Cox model. Two
+ recent additions are the multiple-response Gaussian, and the grouped
+ multinomial. The algorithm uses cyclical coordinate descent in a path-wise
+ fashion, as described in the paper linked to via the URL below."""
+
+ homepage = "http://www.jstatsoft.org/v33/i01/"
+ url = "https://cran.r-project.org/src/contrib/glmnet_2.0-5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/glmnet"
+
+ version('2.0-5', '049b18caa29529614cd684db3beaec2a')
+
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-foreach', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-googlevis/package.py b/var/spack/repos/builtin/packages/r-googlevis/package.py
new file mode 100644
index 0000000000..29916965b4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-googlevis/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGooglevis(RPackage):
+ """R interface to Google Charts API, allowing users to create interactive
+ charts based on data frames. Charts are displayed locally via the R HTTP
+ help server. A modern browser with an Internet connection is required and
+ for some charts a Flash player. The data remains local and is not uploaded
+ to Google."""
+
+ homepage = "https://github.com/mages/googleVis#googlevis"
+ url = "https://cran.r-project.org/src/contrib/googleVis_0.6.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/googleVis"
+
+ version('0.6.0', 'ec36fd2a6884ddc7baa894007d0d0468')
+
+ depends_on('r-jsonlite', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-gridbase/package.py b/var/spack/repos/builtin/packages/r-gridbase/package.py
new file mode 100644
index 0000000000..73d87c7e49
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-gridbase/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGridbase(RPackage):
+ """Integration of base and grid graphics."""
+
+ homepage = "https://cran.r-project.org/web/packages/gridBase/index.html"
+ url = "https://cran.r-project.org/src/contrib/gridBase_0.4-7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/gridBase"
+
+ version('0.4-7', '6d5064a85f5c966a92ee468ae44c5f1f')
diff --git a/var/spack/repos/builtin/packages/r-gridextra/package.py b/var/spack/repos/builtin/packages/r-gridextra/package.py
new file mode 100644
index 0000000000..304035dc06
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-gridextra/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGridextra(RPackage):
+ """Provides a number of user-level functions to work with "grid" graphics,
+ notably to arrange multiple grid-based plots on a page, and draw tables."""
+
+ homepage = "https://github.com/baptiste/gridextra"
+ url = "https://cran.r-project.org/src/contrib/gridExtra_2.2.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/gridExtra"
+
+ version('2.2.1', '7076c2122d387c7ef3add69a1c4fc1b2')
+
+ depends_on('r-gtable', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-gtable/package.py b/var/spack/repos/builtin/packages/r-gtable/package.py
new file mode 100644
index 0000000000..236416755b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-gtable/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGtable(RPackage):
+ """Tools to make it easier to work with "tables" of 'grobs'."""
+
+ homepage = "https://cran.r-project.org/web/packages/gtable/index.html"
+ url = "https://cran.r-project.org/src/contrib/gtable_0.2.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/gtable"
+
+ version('0.2.0', '124090ae40b2dd3170ae11180e0d4cab')
diff --git a/var/spack/repos/builtin/packages/r-gtools/package.py b/var/spack/repos/builtin/packages/r-gtools/package.py
new file mode 100644
index 0000000000..632187b49e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-gtools/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RGtools(RPackage):
+ """Functions to assist in R programming, including: - assist in developing,
+ updating, and maintaining R and R packages ('ask', 'checkRVersion',
+ 'getDependencies', 'keywords', 'scat'), - calculate the logit and inverse
+ logit transformations ('logit', 'inv.logit'), - test if a value is missing,
+ empty or contains only NA and NULL values ('invalid'), - manipulate R's
+ .Last function ('addLast'), - define macros ('defmacro'), - detect odd and
+ even integers ('odd', 'even'), - convert strings containing non-ASCII
+ characters (like single quotes) to plain ASCII ('ASCIIfy'), - perform a
+ binary search ('binsearch'), - sort strings containing both numeric and
+ character components ('mixedsort'), - create a factor variable from the
+ quantiles of a continuous variable ('quantcut'), - enumerate permutations
+ and combinations ('combinations', 'permutation'), - calculate and convert
+ between fold-change and log-ratio ('foldchange', 'logratio2foldchange',
+ 'foldchange2logratio'), - calculate probabilities and generate random
+ numbers from Dirichlet distributions ('rdirichlet', 'ddirichlet'), - apply
+ a function over adjacent subsets of a vector ('running'), - modify the
+ TCP_NODELAY ('de-Nagle') flag for socket objects, - efficient 'rbind' of
+ data frames, even if the column names don't match ('smartbind'), - generate
+ significance stars from p-values ('stars.pval'), - convert characters
+ to/from ASCII codes."""
+
+ homepage = "https://cran.r-project.org/package=gtools"
+ url = "https://cran.r-project.org/src/contrib/gtools_3.5.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/gtools"
+
+ version('3.5.0', '45f8800c0336d35046641fbacc56bdbb')
diff --git a/var/spack/repos/builtin/packages/r-hexbin/package.py b/var/spack/repos/builtin/packages/r-hexbin/package.py
new file mode 100644
index 0000000000..0a5c66c1a3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-hexbin/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RHexbin(RPackage):
+ """Binning and plotting functions for hexagonal bins. Now uses and relies
+ on grid graphics and formal (S4) classes and methods."""
+
+ homepage = "http://github.com/edzer/hexbin"
+ url = "https://cran.r-project.org/src/contrib/hexbin_1.27.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/hexbin"
+
+ version('1.27.1', '7f380390c6511e97df10a810a3b3bb7c')
+
+ depends_on('r-lattice', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-highr/package.py b/var/spack/repos/builtin/packages/r-highr/package.py
new file mode 100644
index 0000000000..13164f9c60
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-highr/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RHighr(RPackage):
+ """Provides syntax highlighting for R source code. Currently it supports
+ LaTeX and HTML output. Source code of other languages is supported via
+ Andre Simon's highlight package."""
+
+ homepage = "https://github.com/yihui/highr"
+ url = "https://cran.r-project.org/src/contrib/highr_0.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/highr"
+
+ version('0.6', 'bf47388c5f57dc61962362fb7e1d8b16')
diff --git a/var/spack/repos/builtin/packages/r-htmltools/package.py b/var/spack/repos/builtin/packages/r-htmltools/package.py
new file mode 100644
index 0000000000..9374e1f9cc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-htmltools/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RHtmltools(RPackage):
+ """Tools for HTML generation and output."""
+
+ homepage = "https://github.com/rstudio/htmltools"
+ url = "https://cran.r-project.org/src/contrib/htmltools_0.3.5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/htmltools"
+
+ version('0.3.5', '5f001aff4a39e329f7342dcec5139724')
+
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-htmlwidgets/package.py b/var/spack/repos/builtin/packages/r-htmlwidgets/package.py
new file mode 100644
index 0000000000..85ab593245
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-htmlwidgets/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RHtmlwidgets(RPackage):
+ """A framework for creating HTML widgets that render in various contexts
+ including the R console, 'R Markdown' documents, and 'Shiny' web
+ applications."""
+
+ homepage = "https://github.com/ramnathv/htmlwidgets"
+ url = "https://cran.r-project.org/src/contrib/htmlwidgets_0.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/htmlwidgets"
+
+ version('0.6', '7fa522d2eda97593978021bda9670c0e')
+
+ depends_on('r-htmltools', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-yaml', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-httpuv/package.py b/var/spack/repos/builtin/packages/r-httpuv/package.py
new file mode 100644
index 0000000000..e4b60893b6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-httpuv/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RHttpuv(RPackage):
+ """Provides low-level socket and protocol support for handling HTTP and
+ WebSocket requests directly from within R. It is primarily intended as a
+ building block for other packages, rather than making it particularly easy
+ to create complete web applications using httpuv alone. httpuv is built on
+ top of the libuv and http-parser C libraries, both of which were developed
+ by Joyent, Inc. (See LICENSE file for libuv and http-parser license
+ information.)"""
+
+ homepage = "https://github.com/rstudio/httpuv"
+ url = "https://cran.r-project.org/src/contrib/httpuv_1.3.3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/httpuv"
+
+ version('1.3.3', 'c78ae068cf59e949b9791be987bb4489')
+
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-httr/package.py b/var/spack/repos/builtin/packages/r-httr/package.py
new file mode 100644
index 0000000000..55a5b0efab
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-httr/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RHttr(RPackage):
+ """Useful tools for working with HTTP organised by HTTP verbs (GET(),
+ POST(), etc.). Configuration functions make it easy to control additional
+ request components (authenticate(), add_headers() and so on)."""
+
+ homepage = "https://github.com/hadley/httr"
+ url = "https://cran.r-project.org/src/contrib/httr_1.1.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/httr"
+
+ version('1.1.0', '5ffbbc5c2529e49f00aaa521a2b35600')
+
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-mime', type=('build', 'run'))
+ depends_on('r-curl', type=('build', 'run'))
+ depends_on('r-openssl', type=('build', 'run'))
+ depends_on('r-r6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-igraph/package.py b/var/spack/repos/builtin/packages/r-igraph/package.py
new file mode 100644
index 0000000000..993a80ae1c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-igraph/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RIgraph(RPackage):
+ """Routines for simple graphs and network analysis. It can handle large
+ graphs very well and provides functions for generating random and regular
+ graphs, graph visualization, centrality methods and much more."""
+
+ homepage = "http://igraph.org/"
+ url = "https://cran.r-project.org/src/contrib/igraph_1.0.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/igraph"
+
+ version('1.0.1', 'ea33495e49adf4a331e4ba60ba559065')
+
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-nmf', type=('build', 'run'))
+ depends_on('r-irlba', type=('build', 'run'))
+ depends_on('gmp')
+ depends_on('libxml2')
diff --git a/var/spack/repos/builtin/packages/r-influencer/package.py b/var/spack/repos/builtin/packages/r-influencer/package.py
new file mode 100644
index 0000000000..bbfed54e25
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-influencer/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RInfluencer(RPackage):
+ """Provides functionality to compute various node centrality measures on
+ networks. Included are functions to compute betweenness centrality (by
+ utilizing Madduri and Bader's SNAP library), implementations of Burt's
+ constraint and effective network size (ENS) metrics, Borgatti's algorithm
+ to identify key players, and Valente's bridging metric. On Unix systems,
+ the betweenness, Key Players, and bridging implementations are parallelized
+ with OpenMP, which may run faster on systems which have OpenMP
+ configured."""
+
+ homepage = "https://github.com/rcc-uchicago/influenceR"
+ url = "https://cran.r-project.org/src/contrib/influenceR_0.1.0.tar.gz"
+
+ version('0.1.0', '6c8b6decd78c341364b5811fb3050ba5')
+
+ depends_on('r-igraph', type=('build', 'run'))
+ depends_on('r-matrix', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-inline/package.py b/var/spack/repos/builtin/packages/r-inline/package.py
new file mode 100644
index 0000000000..f30c87dc9b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-inline/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RInline(RPackage):
+ """Functionality to dynamically define R functions and S4 methods with
+ inlined C, C++ or Fortran code supporting .C and .Call calling
+ conventions."""
+
+ homepage = "https://cran.r-project.org/web/packages/inline/index.html"
+ url = "https://cran.r-project.org/src/contrib/inline_0.3.14.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/inline"
+
+ version('0.3.14', '9fe304a6ebf0e3889c4c6a7ad1c50bca')
diff --git a/var/spack/repos/builtin/packages/r-irdisplay/package.py b/var/spack/repos/builtin/packages/r-irdisplay/package.py
new file mode 100644
index 0000000000..f02c00d8ba
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-irdisplay/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RIrdisplay(RPackage):
+ """An interface to the rich display capabilities of Jupyter front-ends
+ (e.g. 'Jupyter Notebook') Designed to be used from a running IRkernel
+ session"""
+
+ homepage = "https://irkernel.github.io"
+ url = "https://cran.rstudio.com/src/contrib/IRdisplay_0.4.4.tar.gz"
+
+ version('0.4.4', '5be672fb82185b90f23bd99ac1e1cdb6')
+
+ depends_on('r-repr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-irkernel/package.py b/var/spack/repos/builtin/packages/r-irkernel/package.py
new file mode 100644
index 0000000000..e69b77f9f0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-irkernel/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RIrkernel(RPackage):
+ """R kernel for Jupyter"""
+
+ homepage = "https://irkernel.github.io/"
+
+ # Git repository
+ version('master', git='https://github.com/IRkernel/IRkernel.git',
+ tag='0.7')
+
+ depends_on('r-repr', type=('build', 'run'))
+ depends_on('r-irdisplay', type=('build', 'run'))
+ depends_on('r-evaluate', type=('build', 'run'))
+ depends_on('r-crayon', type=('build', 'run'))
+ depends_on('r-pbdzmq', type=('build', 'run'))
+ depends_on('r-devtools', type=('build', 'run'))
+ depends_on('r-uuid', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-irlba/package.py b/var/spack/repos/builtin/packages/r-irlba/package.py
new file mode 100644
index 0000000000..e0d1b32565
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-irlba/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RIrlba(RPackage):
+ """Fast and memory efficient methods for truncated singular and eigenvalue
+ decompositions and principal component analysis of large sparse or dense
+ matrices."""
+
+ homepage = "https://cran.r-project.org/web/packages/irlba/index.html"
+ url = "https://cran.r-project.org/src/contrib/irlba_2.0.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/irlba"
+
+ version('2.0.0', '557674cf8b68fea5b9f231058c324d26')
+
+ depends_on('r-matrix', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-iterators/package.py b/var/spack/repos/builtin/packages/r-iterators/package.py
new file mode 100644
index 0000000000..38dff8f9ac
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-iterators/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RIterators(RPackage):
+ """Support for iterators, which allow a programmer to traverse through all
+ the elements of a vector, list, or other collection of data."""
+
+ homepage = "https://cran.r-project.org/web/packages/iterators/index.html"
+ url = "https://cran.r-project.org/src/contrib/iterators_1.0.8.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/iterators"
+
+ version('1.0.8', '2ded7f82cddd8174f1ec98607946c6ee')
diff --git a/var/spack/repos/builtin/packages/r-jpeg/package.py b/var/spack/repos/builtin/packages/r-jpeg/package.py
new file mode 100644
index 0000000000..02c42b5ff1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-jpeg/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RJpeg(RPackage):
+ """This package provides an easy and simple way to read, write and display
+ bitmap images stored in the JPEG format. It can read and write both files
+ and in-memory raw vectors."""
+
+ homepage = "http://www.rforge.net/jpeg/"
+ url = "https://cran.r-project.org/src/contrib/jpeg_0.1-8.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/jpeg"
+
+ version('0.1-8', '696007451d14395b1ed1d0e9af667a57')
+
+ depends_on('jpeg')
diff --git a/var/spack/repos/builtin/packages/r-jsonlite/package.py b/var/spack/repos/builtin/packages/r-jsonlite/package.py
new file mode 100644
index 0000000000..7368187af5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-jsonlite/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RJsonlite(RPackage):
+ """A fast JSON parser and generator optimized for statistical data and the
+ web. Started out as a fork of 'RJSONIO', but has been completely rewritten
+ in recent versions. The package offers flexible, robust, high performance
+ tools for working with JSON in R and is particularly powerful for building
+ pipelines and interacting with a web API. The implementation is based on
+ the mapping described in the vignette (Ooms, 2014). In addition to
+ converting JSON data from/to R objects, 'jsonlite' contains functions to
+ stream, validate, and prettify JSON data. The unit tests included with the
+ package verify that all edge cases are encoded and decoded consistently for
+ use with dynamic data in systems and applications."""
+
+ homepage = "https://github.com/jeroenooms/jsonlite"
+ url = "https://cran.r-project.org/src/contrib/jsonlite_1.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/jsonlite"
+
+ version('1.0', 'c8524e086de22ab39b8ac8000220cc87')
+ version('0.9.21', '4fc382747f88a79ff0718a0d06bed45d')
diff --git a/var/spack/repos/builtin/packages/r-knitr/package.py b/var/spack/repos/builtin/packages/r-knitr/package.py
new file mode 100644
index 0000000000..dd92474944
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-knitr/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RKnitr(RPackage):
+ """Provides a general-purpose tool for dynamic report generation in R using
+ Literate Programming techniques."""
+
+ homepage = "http://yihui.name/knitr/"
+ url = "https://cran.rstudio.com/src/contrib/knitr_1.14.tar.gz"
+ list_url = "https://cran.rstudio.com/src/contrib/Archive/knitr"
+
+ version('1.14', 'ef0fbeaa9372f99ffbc57212a7781511')
+ version('0.6', 'c67d6db84cd55594a9e870c90651a3db')
+
+ depends_on('r-evaluate', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-formatr', type=('build', 'run'))
+ depends_on('r-highr', type=('build', 'run'))
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-markdown', type=('build', 'run'))
+ depends_on('r-yaml', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-labeling/package.py b/var/spack/repos/builtin/packages/r-labeling/package.py
new file mode 100644
index 0000000000..7c288c63a4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-labeling/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RLabeling(RPackage):
+ """Provides a range of axis labeling algorithms."""
+
+ homepage = "https://cran.r-project.org/web/packages/labeling/index.html"
+ url = "https://cran.r-project.org/src/contrib/labeling_0.3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/labeling"
+
+ version('0.3', 'ccd7082ec0b211aba8a89d85176bb534')
diff --git a/var/spack/repos/builtin/packages/r-lattice/package.py b/var/spack/repos/builtin/packages/r-lattice/package.py
new file mode 100644
index 0000000000..ed3c19f2e6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-lattice/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RLattice(RPackage):
+ """A powerful and elegant high-level data visualization system inspired by
+ Trellis graphics, with an emphasis on multivariate data. Lattice is
+ sufficient for typical graphics needs, and is also flexible enough to
+ handle most nonstandard requirements. See ?Lattice for an introduction."""
+
+ homepage = "http://lattice.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/lattice_0.20-34.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/lattice"
+
+ version('0.20-34', 'c2a648b22d4206ae7526fb70b8e90fed')
diff --git a/var/spack/repos/builtin/packages/r-lazyeval/package.py b/var/spack/repos/builtin/packages/r-lazyeval/package.py
new file mode 100644
index 0000000000..ab41a39675
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-lazyeval/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RLazyeval(RPackage):
+ """An alternative approach to non-standard evaluation using formulas.
+ Provides a full implementation of LISP style 'quasiquotation', making it
+ easier to generate code with other code."""
+
+ homepage = "https://cran.r-project.org/web/packages/lazyeval/index.html"
+ url = "https://cran.r-project.org/src/contrib/lazyeval_0.2.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/lazyeval"
+
+ version('0.2.0', 'df1daac908dcf02ae7e12f4335b1b13b')
diff --git a/var/spack/repos/builtin/packages/r-leaflet/package.py b/var/spack/repos/builtin/packages/r-leaflet/package.py
new file mode 100644
index 0000000000..62c2cf3c7c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-leaflet/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RLeaflet(RPackage):
+ """Create and customize interactive maps using the 'Leaflet' JavaScript
+ library and the 'htmlwidgets' package. These maps can be used directly from
+ the R console, from 'RStudio', in Shiny apps and R Markdown documents."""
+
+ homepage = "http://rstudio.github.io/leaflet/"
+ url = "https://cran.r-project.org/src/contrib/leaflet_1.0.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/leaflet"
+
+ version('1.0.1', '7f3d8b17092604d87d4eeb579f73d5df')
+
+ depends_on('r-base64enc', type=('build', 'run'))
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-htmltools', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-markdown', type=('build', 'run'))
+ depends_on('r-png', type=('build', 'run'))
+ depends_on('r-rcolorbrewer', type=('build', 'run'))
+ depends_on('r-raster', type=('build', 'run'))
+ depends_on('r-scales', type=('build', 'run'))
+ depends_on('r-sp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-lme4/package.py b/var/spack/repos/builtin/packages/r-lme4/package.py
new file mode 100644
index 0000000000..0ca545ced9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-lme4/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RLme4(RPackage):
+ """Fit linear and generalized linear mixed-effects models. The models and
+ their components are represented using S4 classes and methods. The core
+ computational algorithms are implemented using the 'Eigen' C++ library for
+ numerical linear algebra and 'RcppEigen' "glue"."""
+
+ homepage = "https://github.com/lme4/lme4/"
+ url = "https://cran.r-project.org/src/contrib/lme4_1.1-12.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/lme4"
+
+ version('1.1-12', 'da8aaebb67477ecb5631851c46207804')
+
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-mass', type=('build', 'run'))
+ depends_on('r-lattice', type=('build', 'run'))
+ depends_on('r-nlme', type=('build', 'run'))
+ depends_on('r-minqa', type=('build', 'run'))
+ depends_on('r-nloptr', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
+ depends_on('r-rcppeigen', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-lmtest/package.py b/var/spack/repos/builtin/packages/r-lmtest/package.py
new file mode 100644
index 0000000000..3d17dd2a7e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-lmtest/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RLmtest(RPackage):
+ """A collection of tests, data sets, and examples for diagnostic checking
+ in linear regression models. Furthermore, some generic tools for inference
+ in parametric models are provided."""
+
+ homepage = "https://cran.r-project.org/package=lmtest"
+ url = "https://cran.r-project.org/src/contrib/lmtest_0.9-34.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/lmtest"
+
+ version('0.9-34', 'fcdf7286bb5ccc2ca46be00bf25ac2fe')
+
+ depends_on('r-zoo', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-lubridate/package.py b/var/spack/repos/builtin/packages/r-lubridate/package.py
new file mode 100644
index 0000000000..159e84e292
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-lubridate/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RLubridate(RPackage):
+ """Functions to work with date-times and timespans: fast and user friendly
+ parsing of date-time data, extraction and updating of components of a
+ date-time (years, months, days, hours, minutes, and seconds), algebraic
+ manipulation on date-time and timespan objects. The 'lubridate' package has
+ a consistent and memorable syntax that makes working with dates easy and
+ fun."""
+
+ homepage = "https://cran.r-project.org/web/packages/lubridate/index.html"
+ url = "https://cran.r-project.org/src/contrib/lubridate_1.5.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/lubridate"
+
+ version('1.5.6', 'a5dc44817548ee219d26a10bae92e611')
+
+ depends_on('r-stringr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-magic/package.py b/var/spack/repos/builtin/packages/r-magic/package.py
index f86917ec0c..b0987adcd7 100644
--- a/var/spack/repos/builtin/packages/r-magic/package.py
+++ b/var/spack/repos/builtin/packages/r-magic/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class RMagic(Package):
+
+class RMagic(RPackage):
"""A collection of efficient, vectorized algorithms for the creation and
investigation of magic squares and hypercubes, including a variety of
functions for the manipulation and analysis of arbitrarily dimensioned
@@ -32,12 +33,8 @@ class RMagic(Package):
homepage = "https://cran.r-project.org/"
url = "https://cran.r-project.org/src/contrib/magic_1.5-6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/magic"
- version('1.5-6', 'a68e5ced253b2196af842e1fc84fd029', expand=False)
-
- extends('R')
-
- depends_on('r-abind')
+ version('1.5-6', 'a68e5ced253b2196af842e1fc84fd029')
- def install(self, spec, prefix):
- R('CMD', 'INSTALL', '--library=%s' % self.module.r_lib_dir, '%s' % self.stage.archive_file)
+ depends_on('r-abind', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-magrittr/package.py b/var/spack/repos/builtin/packages/r-magrittr/package.py
new file mode 100644
index 0000000000..915797e11d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-magrittr/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMagrittr(RPackage):
+ """Provides a mechanism for chaining commands with a new forward-pipe
+ operator, %>%. This operator will forward a value, or the result of an
+ expression, into the next function call/expression. There is flexible
+ support for the type of right-hand side expressions. For more information,
+ see package vignette."""
+
+ homepage = "https://cran.r-project.org/web/packages/magrittr/index.html"
+ url = "https://cran.r-project.org/src/contrib/magrittr_1.5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/magrittr"
+
+ version('1.5', 'e74ab7329f2b9833f0c3c1216f86d65a')
diff --git a/var/spack/repos/builtin/packages/r-mapproj/package.py b/var/spack/repos/builtin/packages/r-mapproj/package.py
new file mode 100644
index 0000000000..3475868abd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-mapproj/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMapproj(RPackage):
+ """Converts latitude/longitude into projected coordinates."""
+
+ homepage = "https://cran.r-project.org/package=mapproj"
+ url = "https://cran.r-project.org/src/contrib/mapproj_1.2-4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/mapproj"
+
+ version('1.2-4', '10e22bde1c790e1540672f15ddcaee71')
+
+ depends_on('r-maps', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-maps/package.py b/var/spack/repos/builtin/packages/r-maps/package.py
new file mode 100644
index 0000000000..c399bc52f3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-maps/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMaps(RPackage):
+ """Display of maps. Projection code and larger maps are in separate
+ packages ('mapproj' and 'mapdata')."""
+
+ homepage = "https://cran.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/maps_3.1.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/maps"
+
+ version('3.1.1', 'ff045eccb6d5a658db5a539116ddf764')
diff --git a/var/spack/repos/builtin/packages/r-maptools/package.py b/var/spack/repos/builtin/packages/r-maptools/package.py
new file mode 100644
index 0000000000..74d0673aea
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-maptools/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMaptools(RPackage):
+ """Set of tools for manipulating and reading geographic data, in particular
+ ESRI shapefiles; C code used from shapelib. It includes binary access to
+ GSHHG shoreline files. The package also provides interface wrappers for
+ exchanging spatial objects with packages such as PBSmapping, spatstat,
+ maps, RArcInfo, Stata tmap, WinBUGS, Mondrian, and others."""
+
+ homepage = "http://r-forge.r-project.org/projects/maptools/"
+ url = "https://cran.r-project.org/src/contrib/maptools_0.8-39.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/maptools"
+
+ version('0.8-39', '3690d96afba8ef22c8e27ae540ffb836')
+
+ depends_on('r-sp', type=('build', 'run'))
+ depends_on('r-foreign', type=('build', 'run'))
+ depends_on('r-lattice', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-markdown/package.py b/var/spack/repos/builtin/packages/r-markdown/package.py
new file mode 100644
index 0000000000..c0e03fef0a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-markdown/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMarkdown(RPackage):
+ """Provides R bindings to the 'Sundown' 'Markdown' rendering library
+ (https://github.com/vmg/sundown). 'Markdown' is a plain-text formatting
+ syntax that can be converted to 'XHTML' or other formats. See
+ http://en.wikipedia.org/wiki/Markdown for more information about
+ 'Markdown'."""
+
+ homepage = "https://github.com/rstudio/markdown"
+ url = "https://cran.r-project.org/src/contrib/markdown_0.7.7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/markdown"
+
+ version('0.7.7', '72deca9c675c7cc9343048edbc29f7ff')
+
+ depends_on('r-mime', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-mass/package.py b/var/spack/repos/builtin/packages/r-mass/package.py
new file mode 100644
index 0000000000..25d3b5869b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-mass/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMass(RPackage):
+ """Functions and datasets to support Venables and Ripley, "Modern Applied
+ Statistics with S" (4th edition, 2002)."""
+
+ homepage = "http://www.stats.ox.ac.uk/pub/MASS4/"
+ url = "https://cran.r-project.org/src/contrib/MASS_7.3-45.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/MASS"
+
+ version('7.3-45', 'aba3d12fab30f1793bee168a1efea88b')
diff --git a/var/spack/repos/builtin/packages/r-matrix/package.py b/var/spack/repos/builtin/packages/r-matrix/package.py
new file mode 100644
index 0000000000..07c2eaf9da
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-matrix/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMatrix(RPackage):
+ """Classes and methods for dense and sparse matrices and operations on them
+ using 'LAPACK' and 'SuiteSparse'."""
+
+ homepage = "http://matrix.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/Matrix_1.2-6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/Matrix"
+
+ version('1.2-6', 'f545307fb1284861e9266c4e9712c55e')
+
+ depends_on('r-lattice', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-matrixmodels/package.py b/var/spack/repos/builtin/packages/r-matrixmodels/package.py
new file mode 100644
index 0000000000..0958de49a3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-matrixmodels/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMatrixmodels(RPackage):
+ """Modelling with sparse and dense 'Matrix' matrices, using modular
+ prediction and response module classes."""
+
+ homepage = "http://matrix.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/MatrixModels_0.4-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/MatrixModels"
+
+ version('0.4-1', '65b3ab56650c62bf1046a3eb1f1e19a0')
+
+ depends_on('r-matrix', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-memoise/package.py b/var/spack/repos/builtin/packages/r-memoise/package.py
new file mode 100644
index 0000000000..76c207dc04
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-memoise/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMemoise(RPackage):
+ """Cache the results of a function so that when you call it again with the
+ same arguments it returns the pre-computed value."""
+
+ homepage = "https://github.com/hadley/memoise"
+ url = "https://cran.r-project.org/src/contrib/memoise_1.0.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/memoise"
+
+ version('1.0.0', 'd31145292e2a88ae9a504cab1602e4ac')
+
+ depends_on('r-digest', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-mgcv/package.py b/var/spack/repos/builtin/packages/r-mgcv/package.py
new file mode 100644
index 0000000000..c8cb067275
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-mgcv/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMgcv(RPackage):
+ """GAMs, GAMMs and other generalized ridge regression with multiple
+ smoothing parameter estimation by GCV, REML or UBRE/AIC. Includes a gam()
+ function, a wide variety of smoothers, JAGS support and distributions
+ beyond the exponential family."""
+
+ homepage = "https://cran.r-project.org/package=mgcv"
+ url = "https://cran.r-project.org/src/contrib/mgcv_1.8-13.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/mgcv"
+
+ version('1.8-13', '30607be3aaf44b13bd8c81fc32e8c984')
+
+ depends_on('r-nlme', type=('build', 'run'))
+ depends_on('r-matrix', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-mime/package.py b/var/spack/repos/builtin/packages/r-mime/package.py
new file mode 100644
index 0000000000..c4d2eb2b3e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-mime/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMime(RPackage):
+ """Guesses the MIME type from a filename extension using the data derived
+ from /etc/mime.types in UNIX-type systems."""
+
+ homepage = "https://github.com/yihui/mime"
+ url = "https://cran.r-project.org/src/contrib/mime_0.5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/mime"
+
+ version('0.5', '87e00b6d57b581465c19ae869a723c4d')
+ version('0.4', '789cb33e41db2206c6fc7c3e9fbc2c02')
diff --git a/var/spack/repos/builtin/packages/r-minqa/package.py b/var/spack/repos/builtin/packages/r-minqa/package.py
new file mode 100644
index 0000000000..7a9032a546
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-minqa/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMinqa(RPackage):
+ """Derivative-free optimization by quadratic approximation based on an
+ interface to Fortran implementations by M. J. D. Powell."""
+
+ homepage = "http://optimizer.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/minqa_1.2.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/minqa"
+
+ version('1.2.4', 'bcaae4fdba60a33528f2116e2fd51105')
+
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-multcomp/package.py b/var/spack/repos/builtin/packages/r-multcomp/package.py
new file mode 100644
index 0000000000..70704a9c61
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-multcomp/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMultcomp(RPackage):
+ """Simultaneous tests and confidence intervals for general linear
+ hypotheses in parametric models, including linear, generalized linear,
+ linear mixed effects, and survival models. The package includes demos
+ reproducing analyses presented in the book "Multiple Comparisons Using R"
+ (Bretz, Hothorn, Westfall, 2010, CRC Press)."""
+
+ homepage = "http://multcomp.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/multcomp_1.4-6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/multcomp"
+
+ version('1.4-6', 'f1353ede2ed78b23859a7f1f1f9ebe88')
+
+ depends_on('r-mvtnorm', type=('build', 'run'))
+ depends_on('r-survival', type=('build', 'run'))
+ depends_on('r-thdata', type=('build', 'run'))
+ depends_on('r-sandwich', type=('build', 'run'))
+ depends_on('r-codetools', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-munsell/package.py b/var/spack/repos/builtin/packages/r-munsell/package.py
new file mode 100644
index 0000000000..670fed41e2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-munsell/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMunsell(RPackage):
+ """Provides easy access to, and manipulation of, the Munsell colours.
+ Provides a mapping between Munsell's original notation (e.g. "5R 5/10") and
+ hexadecimal strings suitable for use directly in R graphics. Also provides
+ utilities to explore slices through the Munsell colour tree, to transform
+ Munsell colours and display colour palettes."""
+
+ homepage = "https://cran.r-project.org/web/packages/munsell/index.html"
+ url = "https://cran.r-project.org/src/contrib/munsell_0.4.3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/munsell"
+
+ version('0.4.3', 'ebd205323dc37c948f499ee08be9c476')
+
+ depends_on('r-colorspace', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-mvtnorm/package.py b/var/spack/repos/builtin/packages/r-mvtnorm/package.py
new file mode 100644
index 0000000000..01e3aea91d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-mvtnorm/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RMvtnorm(RPackage):
+ """Computes multivariate normal and t probabilities, quantiles, random
+ deviates and densities."""
+
+ homepage = "http://mvtnorm.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/mvtnorm_1.0-5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/mvtnorm"
+
+ version('1.0-5', '5894dd3969bbfa26f4862c45f9a48a52')
diff --git a/var/spack/repos/builtin/packages/r-ncdf4/package.py b/var/spack/repos/builtin/packages/r-ncdf4/package.py
new file mode 100644
index 0000000000..597f4d903f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-ncdf4/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RNcdf4(RPackage):
+ """Provides a high-level R interface to data files written using Unidata's
+ netCDF library (version 4 or earlier), which are binary data files that are
+ portable across platforms and include metadata information in addition to
+ the data sets. Using this package, netCDF files (either version 4 or
+ "classic" version 3) can be opened and data sets read in easily. It is also
+ easy to create new netCDF dimensions, variables, and files, in either
+ version 3 or 4 format, and manipulate existing netCDF files. This package
+ replaces the former ncdf package, which only worked with netcdf version 3
+ files. For various reasons the names of the functions have had to be
+ changed from the names in the ncdf package. The old ncdf package is still
+ available at the URL given below, if you need to have backward
+ compatibility. It should be possible to have both the ncdf and ncdf4
+ packages installed simultaneously without a problem. However, the ncdf
+ package does not provide an interface for netcdf version 4 files."""
+
+ homepage = "http://cirrus.ucsd.edu/~pierce/ncdf"
+ url = "https://cran.r-project.org/src/contrib/ncdf4_1.15.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/ncdf4"
+
+ version('1.15', 'cd60dadbae3be31371e1ed40ddeb420a')
+
+ depends_on('netcdf')
diff --git a/var/spack/repos/builtin/packages/r-networkd3/package.py b/var/spack/repos/builtin/packages/r-networkd3/package.py
new file mode 100644
index 0000000000..e881394538
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-networkd3/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RNetworkd3(RPackage):
+ """Creates 'D3' 'JavaScript' network, tree, dendrogram, and Sankey graphs
+ from 'R'."""
+
+ homepage = "http://cran.r-project.org/package=networkD3"
+ url = "https://cran.r-project.org/src/contrib/networkD3_0.2.12.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/networkD3"
+
+ version('0.2.12', '356fe4be59698e6fb052644bd9659d84')
+
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-igraph', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-nlme/package.py b/var/spack/repos/builtin/packages/r-nlme/package.py
new file mode 100644
index 0000000000..869e03ab51
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-nlme/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RNlme(RPackage):
+ """Fit and compare Gaussian linear and nonlinear mixed-effects models."""
+
+ homepage = "https://cran.r-project.org/package=nlme"
+ url = "https://cran.r-project.org/src/contrib/nlme_3.1-128.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/nlme"
+
+ version('3.1-128', '3d75ae7380bf123761b95a073eb55008')
+
+ depends_on('r-lattice', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-nloptr/package.py b/var/spack/repos/builtin/packages/r-nloptr/package.py
new file mode 100644
index 0000000000..8da84c5814
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-nloptr/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RNloptr(RPackage):
+ """nloptr is an R interface to NLopt. NLopt is a free/open-source library
+ for nonlinear optimization, providing a common interface for a number of
+ different free optimization routines available online as well as original
+ implementations of various other algorithms. See
+ http://ab-initio.mit.edu/wiki/index.php/NLopt_Introduction for more
+ information on the available algorithms. During installation on Unix the
+ NLopt code is downloaded and compiled from the NLopt website."""
+
+ homepage = "https://cran.r-project.org/package=nloptr"
+ url = "https://cran.r-project.org/src/contrib/nloptr_1.0.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/nloptr"
+
+ version('1.0.4', '9af69a613349b236fd377d0a107f484c')
diff --git a/var/spack/repos/builtin/packages/r-nmf/package.py b/var/spack/repos/builtin/packages/r-nmf/package.py
new file mode 100644
index 0000000000..78347ea615
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-nmf/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RNmf(RPackage):
+ """Provides a framework to perform Non-negative Matrix Factorization (NMF).
+ The package implements a set of already published algorithms and seeding
+ methods, and provides a framework to test, develop and plug new/custom
+ algorithms. Most of the built-in algorithms have been optimized in C++, and
+ the main interface function provides an easy way of performing parallel
+ computations on multicore machines."""
+
+ homepage = "http://renozao.github.io/NMF"
+ url = "https://cran.r-project.org/src/contrib/NMF_0.20.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/NMF"
+
+ version('0.20.6', '81df07b3bf710a611db5af24730ff3d0')
+
+ depends_on('r-pkgmaker', type=('build', 'run'))
+ depends_on('r-registry', type=('build', 'run'))
+ depends_on('r-rngtools', type=('build', 'run'))
+ depends_on('r-cluster', type=('build', 'run'))
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-gridbase', type=('build', 'run'))
+ depends_on('r-colorspace', type=('build', 'run'))
+ depends_on('r-rcolorbrewer', type=('build', 'run'))
+ depends_on('r-foreach', type=('build', 'run'))
+ depends_on('r-doparallel', type=('build', 'run'))
+ depends_on('r-ggplot2', type=('build', 'run'))
+ depends_on('r-reshape2', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-nnet/package.py b/var/spack/repos/builtin/packages/r-nnet/package.py
new file mode 100644
index 0000000000..eeb6f91034
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-nnet/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RNnet(RPackage):
+ """Software for feed-forward neural networks with a single hidden layer,
+ and for multinomial log-linear models."""
+
+ homepage = "http://www.stats.ox.ac.uk/pub/MASS4/"
+ url = "https://cran.r-project.org/src/contrib/nnet_7.3-12.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/nnet"
+
+ version('7.3-12', 'dc7c6f0d0de53d8fc72b44554400a74e')
diff --git a/var/spack/repos/builtin/packages/r-np/package.py b/var/spack/repos/builtin/packages/r-np/package.py
new file mode 100644
index 0000000000..e15cb7efcb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-np/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RNp(RPackage):
+ """This package provides a variety of nonparametric (and semiparametric)
+ kernel methods that seamlessly handle a mix of continuous, unordered, and
+ ordered factor data types. We would like to gratefully acknowledge support
+ from the Natural Sciences and Engineering Research Council of Canada
+ (NSERC:www.nserc.ca), the Social Sciences and Humanities Research Council
+ of Canada (SSHRC:www.sshrc.ca), and the Shared Hierarchical Academic
+ Research Computing Network (SHARCNET:www.sharcnet.ca)."""
+
+ homepage = "https://github.com/JeffreyRacine/R-Package-np/"
+ url = "https://cran.r-project.org/src/contrib/np_0.60-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/np"
+
+ version('0.60-2', 'e094d52ddff7280272b41e6cb2c74389')
+
+ depends_on('r-boot', type=('build', 'run'))
+ depends_on('r-cubature', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-openssl/package.py b/var/spack/repos/builtin/packages/r-openssl/package.py
new file mode 100644
index 0000000000..bf9f38be72
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-openssl/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class ROpenssl(RPackage):
+ """Bindings to OpenSSL libssl and libcrypto, plus custom SSH pubkey
+ parsers. Supports RSA, DSA and EC curves P-256, P-384 and P-521.
+ Cryptographic signatures can either be created and verified manually or via
+ x509 certificates. AES can be used in cbc, ctr or gcm mode for symmetric
+ encryption; RSA for asymmetric (public key) encryption or EC for Diffie
+ Hellman. High-level envelope functions combine RSA and AES for encrypting
+ arbitrary sized data. Other utilities include key generators, hash
+ functions (md5, sha1, sha256, etc), base64 encoder, a secure random number
+ generator, and 'bignum' math methods for manually performing crypto
+ calculations on large multibyte integers."""
+
+ homepage = "https://github.com/jeroenooms/openssl#readme"
+ url = "https://cran.r-project.org/src/contrib/openssl_0.9.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/openssl"
+
+ version('0.9.4', '82a890e71ed0e74499878bedacfb8ccb')
+
+ depends_on('openssl')
diff --git a/var/spack/repos/builtin/packages/r-packrat/package.py b/var/spack/repos/builtin/packages/r-packrat/package.py
new file mode 100644
index 0000000000..726a6640e8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-packrat/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RPackrat(RPackage):
+ """Manage the R packages your project depends on in an isolated, portable,
+ and reproducible way."""
+
+ homepage = "https://github.com/rstudio/packrat/"
+ url = "https://cran.r-project.org/src/contrib/packrat_0.4.7-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/packrat"
+
+ version('0.4.7-1', '80c2413269b292ade163a70ba5053e84')
diff --git a/var/spack/repos/builtin/packages/r-partykit/package.py b/var/spack/repos/builtin/packages/r-partykit/package.py
new file mode 100644
index 0000000000..8773dace22
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-partykit/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RPartykit(RPackage):
+ """A toolkit with infrastructure for representing, summarizing, and
+ visualizing tree-structured regression and classification models. This
+ unified infrastructure can be used for reading/coercing tree models from
+ different sources ('rpart', 'RWeka', 'PMML') yielding objects that share
+ functionality for print()/plot()/predict() methods. Furthermore, new and
+ improved reimplementations of conditional inference trees (ctree()) and
+ model-based recursive partitioning (mob()) from the 'party' package are
+ provided based on the new infrastructure."""
+
+ homepage = "http://partykit.r-forge.r-project.org/partykit"
+ url = "https://cran.r-project.org/src/contrib/partykit_1.1-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/partykit"
+
+ version('1.1-1', '8fcb31d73ec1b8cd3bcd9789639a9277')
+
+ depends_on('r-survival', type=('build', 'run'))
+ depends_on('r-formula', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-pbdzmq/package.py b/var/spack/repos/builtin/packages/r-pbdzmq/package.py
new file mode 100644
index 0000000000..f602e50723
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-pbdzmq/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RPbdzmq(RPackage):
+ """'ZeroMQ' is a well-known library for high-performance asynchronous
+ messaging in scalable, distributed applications. This package provides
+ high level R wrapper functions to easily utilize 'ZeroMQ'. We mainly focus
+ on interactive client/server programming frameworks. For convenience, a
+ minimal 'ZeroMQ' library (4.1.0 rc1) is shipped with 'pbdZMQ', which can
+ be used if no system installation of 'ZeroMQ' is available. A few wrapper
+ functions compatible with 'rzmq' are also provided."""
+
+ homepage = "http://r-pbd.org/"
+ url = "https://cran.r-project.org/src/contrib/pbdZMQ_0.2-4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/pbdZMQ"
+
+ version('0.2-4', 'e5afb70199aa54d737ee7a0e26bde060')
+
+ depends_on('r-r6', type=('build', 'run'))
+ depends_on('zeromq')
diff --git a/var/spack/repos/builtin/packages/r-pbkrtest/package.py b/var/spack/repos/builtin/packages/r-pbkrtest/package.py
new file mode 100644
index 0000000000..2d51d2b958
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-pbkrtest/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RPbkrtest(RPackage):
+ """Test in mixed effects models. Attention is on mixed effects models as
+ implemented in the 'lme4' package. This package implements a parametric
+ bootstrap test and a Kenward Roger modification of F-tests for linear mixed
+ effects models and a parametric bootstrap test for generalized linear mixed
+ models."""
+
+ homepage = "http://people.math.aau.dk/~sorenh/software/pbkrtest/"
+ url = "https://cran.r-project.org/src/contrib/pbkrtest_0.4-6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/pbkrtest"
+
+ version('0.4-6', '0a7d9ff83b8d131af9b2335f35781ef9')
+
+ depends_on('r-lme4', type=('build', 'run'))
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-mass', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-pkgmaker/package.py b/var/spack/repos/builtin/packages/r-pkgmaker/package.py
new file mode 100644
index 0000000000..099cabd954
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-pkgmaker/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RPkgmaker(RPackage):
+ """This package provides some low-level utilities to use for package
+ development. It currently provides managers for multiple package specific
+ options and registries, vignette, unit test and bibtex related utilities.
+ It serves as a base package for packages like NMF, RcppOctave, doRNG, and
+ as an incubator package for other general purposes utilities, that will
+ eventually be packaged separately. It is still under heavy development and
+ changes in the interface(s) are more than likely to happen."""
+
+ homepage = "https://renozao.github.io/pkgmaker"
+ url = "https://cran.r-project.org/src/contrib/pkgmaker_0.22.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/pkgmaker"
+
+ version('0.22', '73a0c6d3e84c6dadf3de7582ef7e88a4')
+
+ depends_on('r-registry', type=('build', 'run'))
+ depends_on('r-codetools', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-xtable', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-plotrix/package.py b/var/spack/repos/builtin/packages/r-plotrix/package.py
new file mode 100644
index 0000000000..8a17c72f91
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-plotrix/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RPlotrix(RPackage):
+ """Lots of plots, various labeling, axis and color scaling functions."""
+
+ homepage = "https://cran.r-project.org/package=plotrix"
+ url = "https://cran.r-project.org/src/contrib/plotrix_3.6-3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/plotrix"
+
+ version('3.6-3', '23e3e022a13a596e9b77b40afcb4a2ef')
diff --git a/var/spack/repos/builtin/packages/r-plyr/package.py b/var/spack/repos/builtin/packages/r-plyr/package.py
new file mode 100644
index 0000000000..6f48f5c38e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-plyr/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RPlyr(RPackage):
+ """A set of tools that solves a common set of problems: you need to break a
+ big problem down into manageable pieces, operate on each piece and then put
+ all the pieces back together. For example, you might want to fit a model to
+ each spatial location or time point in your study, summarise data by panels
+ or collapse high-dimensional arrays to simpler summary statistics. The
+ development of 'plyr' has been generously supported by 'Becton
+ Dickinson'."""
+
+ homepage = "http://had.co.nz/plyr"
+ url = "https://cran.r-project.org/src/contrib/plyr_1.8.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/plyr"
+
+ version('1.8.4', 'ef455cf7fc06e34837692156b7b2587b')
+
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-png/package.py b/var/spack/repos/builtin/packages/r-png/package.py
new file mode 100644
index 0000000000..38b7ae5138
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-png/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RPng(RPackage):
+ """This package provides an easy and simple way to read, write and display
+ bitmap images stored in the PNG format. It can read and write both files
+ and in-memory raw vectors."""
+
+ homepage = "http://www.rforge.net/png/"
+ url = "https://cran.r-project.org/src/contrib/png_0.1-7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/png"
+
+ version('0.1-7', '1ebc8b8aa5979b12c5ec2384b30d649f')
+
+ depends_on('libpng')
diff --git a/var/spack/repos/builtin/packages/r-praise/package.py b/var/spack/repos/builtin/packages/r-praise/package.py
new file mode 100644
index 0000000000..ff23594af9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-praise/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RPraise(RPackage):
+ """Build friendly R packages that praise their users if they have done
+ something good, or they just need it to feel better."""
+
+ homepage = "https://github.com/gaborcsardi/praise"
+ url = "https://cran.r-project.org/src/contrib/praise_1.0.0.tar.gz"
+
+ version('1.0.0', '9318724cec0454884b5f762bee2da6a1')
diff --git a/var/spack/repos/builtin/packages/r-proto/package.py b/var/spack/repos/builtin/packages/r-proto/package.py
new file mode 100644
index 0000000000..2553e325f3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-proto/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RProto(RPackage):
+ """An object oriented system using object-based, also called
+ prototype-based, rather than class-based object oriented ideas."""
+
+ homepage = "http://r-proto.googlecode.com/"
+ url = "https://cran.r-project.org/src/contrib/proto_0.3-10.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/proto"
+
+ version('0.3-10', 'd5523943a5be6ca2f0ab557c900f8212')
diff --git a/var/spack/repos/builtin/packages/r-pryr/package.py b/var/spack/repos/builtin/packages/r-pryr/package.py
new file mode 100644
index 0000000000..3a103e9855
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-pryr/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RPryr(RPackage):
+ """Useful tools to pry back the covers of R and understand the language
+ at a deeper level."""
+
+ homepage = "https://github.com/hadley/pryr"
+ url = "https://cran.r-project.org/src/contrib/pryr_0.1.2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/pryr"
+
+ version('0.1.2', '66b597a762aa15a3b7037779522983b6')
+
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-quantmod/package.py b/var/spack/repos/builtin/packages/r-quantmod/package.py
new file mode 100644
index 0000000000..4cc53fcf69
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-quantmod/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RQuantmod(RPackage):
+ """Specify, build, trade, and analyse quantitative financial trading
+ strategies."""
+
+ homepage = "http://www.quantmod.com/"
+ url = "https://cran.r-project.org/src/contrib/quantmod_0.4-5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/quantmod"
+
+ version('0.4-5', 'cab3c409e4de3df98a20f1ded60f3631')
+
+ depends_on('r-xts', type=('build', 'run'))
+ depends_on('r-zoo', type=('build', 'run'))
+ depends_on('r-ttr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-quantreg/package.py b/var/spack/repos/builtin/packages/r-quantreg/package.py
new file mode 100644
index 0000000000..2d5091ccaf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-quantreg/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RQuantreg(RPackage):
+ """Estimation and inference methods for models of conditional quantiles:
+ Linear and nonlinear parametric and non-parametric (total variation
+ penalized) models for conditional quantiles of a univariate response
+ and several methods for handling censored survival data. Portfolio
+ selection methods based on expected shortfall risk are also
+ included."""
+
+ homepage = "https://cran.r-project.org/package=quantreg"
+ url = "https://cran.r-project.org/src/contrib/quantreg_5.26.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/quantreg"
+
+ version('5.26', '1d89ed932fb4d67ae2d5da0eb8c2989f')
+
+ depends_on('r-sparsem', type=('build', 'run'))
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-matrixmodels', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-r6/package.py b/var/spack/repos/builtin/packages/r-r6/package.py
new file mode 100644
index 0000000000..e64a8a6532
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-r6/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RR6(RPackage):
+ """The R6 package allows the creation of classes with reference semantics,
+ similar to R's built-in reference classes. Compared to reference classes,
+ R6 classes are simpler and lighter-weight, and they are not built on S4
+ classes so they do not require the methods package. These classes allow
+ public and private members, and they support inheritance, even when the
+ classes are defined in different packages."""
+
+ homepage = "https://github.com/wch/R6/"
+ url = "https://cran.r-project.org/src/contrib/R6_2.1.2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/R6"
+
+ version('2.1.2', 'b6afb9430e48707be87638675390e457')
diff --git a/var/spack/repos/builtin/packages/r-randomforest/package.py b/var/spack/repos/builtin/packages/r-randomforest/package.py
new file mode 100644
index 0000000000..bc7798695d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-randomforest/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRandomforest(RPackage):
+ """Classification and regression based on a forest of trees using random
+ inputs."""
+
+ homepage = "https://www.stat.berkeley.edu/~breiman/RandomForests/"
+ url = "https://cran.r-project.org/src/contrib/randomForest_4.6-12.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/randomForest"
+
+ version('4.6-12', '071c03af974198e861f1475c5bab9e7a')
diff --git a/var/spack/repos/builtin/packages/r-raster/package.py b/var/spack/repos/builtin/packages/r-raster/package.py
new file mode 100644
index 0000000000..daa42793ee
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-raster/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRaster(RPackage):
+ """Reading, writing, manipulating, analyzing and modeling of gridded
+ spatial data. The package implements basic and high-level functions.
+ Processing of very large files is supported."""
+
+ homepage = "http://cran.r-project.org/package=raster"
+ url = "https://cran.r-project.org/src/contrib/raster_2.5-8.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/raster"
+
+ version('2.5-8', '2a7db931c74d50516e82d04687c0a577')
+
+ depends_on('r-sp', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rbokeh/package.py b/var/spack/repos/builtin/packages/r-rbokeh/package.py
new file mode 100644
index 0000000000..00f15891b2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rbokeh/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RRbokeh(RPackage):
+ """R interface for creating plots in Bokeh. Bokeh by Continuum
+ Analytics."""
+
+ homepage = "https://hafen.github.io/rbokeh"
+ url = "https://cran.r-project.org/src/contrib/rbokeh_0.5.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rbokeh"
+
+ version('0.5.0', '4e14778c3fbd9286460ca28c68f57d10')
+
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-maps', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-hexbin', type=('build', 'run'))
+ depends_on('r-lazyeval', type=('build', 'run'))
+ depends_on('r-pryr', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-ggplot2', type=('build', 'run'))
+ depends_on('r-scales', type=('build', 'run'))
+ depends_on('r-gistr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rcolorbrewer/package.py b/var/spack/repos/builtin/packages/r-rcolorbrewer/package.py
new file mode 100644
index 0000000000..59f134caad
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rcolorbrewer/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRcolorbrewer(RPackage):
+ """Provides color schemes for maps (and other graphics) designed by Cynthia
+ Brewer as described at http://colorbrewer2.org"""
+
+ homepage = "http://colorbrewer2.org"
+ url = "https://cran.r-project.org/src/contrib/RColorBrewer_1.1-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RColorBrewer"
+
+ version('1.1-2', '66054d83eade4dff8a43ad4732691182')
diff --git a/var/spack/repos/builtin/packages/r-rcpp/package.py b/var/spack/repos/builtin/packages/r-rcpp/package.py
new file mode 100644
index 0000000000..b447dea8bd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rcpp/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRcpp(RPackage):
+ """The 'Rcpp' package provides R functions as well as C++ classes which
+ offer a seamless integration of R and C++. Many R data types and objects
+ can be mapped back and forth to C++ equivalents, which facilitates both
+ the writing of new code and the integration of third-party libraries.
+ Documentation about 'Rcpp' is provided by several vignettes included in
+ this package, via the 'Rcpp Gallery' site at <http://gallery.rcpp.org>, the
+ paper by Eddelbuettel and Francois (2011, JSS), and the book by
+ Eddelbuettel (2013, Springer); see 'citation("Rcpp")' for details on these
+ last two."""
+
+ homepage = "http://dirk.eddelbuettel.com/code/rcpp.html"
+ url = "https://cran.r-project.org/src/contrib/Rcpp_0.12.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/Rcpp"
+
+ version('0.12.6', 'db4280fb0a79cd19be73a662c33b0a8b')
+ version('0.12.5', 'f03ec05b4e391cc46e7ce330e82ff5e2')
diff --git a/var/spack/repos/builtin/packages/r-rcppeigen/package.py b/var/spack/repos/builtin/packages/r-rcppeigen/package.py
new file mode 100644
index 0000000000..23ec0bc27b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rcppeigen/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRcppeigen(RPackage):
+ """R and 'Eigen' integration using 'Rcpp'. 'Eigen' is a C++ template
+ library for linear algebra: matrices, vectors, numerical solvers and
+ related algorithms. It supports dense and sparse matrices on integer,
+ floating point and complex numbers, decompositions of such matrices, and
+ solutions of linear systems. Its performance on many algorithms is
+ comparable with some of the best implementations based on 'Lapack' and
+ level-3 'BLAS'. The 'RcppEigen' package includes the header files from the
+ 'Eigen' C++ template library (currently version 3.2.8). Thus users do not
+ need to install 'Eigen' itself in order to use 'RcppEigen'. Since version
+ 3.1.1, 'Eigen' is licensed under the Mozilla Public License (version 2);
+ earlier versions were licensed under the GNU LGPL version 3 or later.
+ 'RcppEigen' (the 'Rcpp' bindings/bridge to 'Eigen') is licensed under the
+ GNU GPL version 2 or later, as is the rest of 'Rcpp'."""
+
+ homepage = "http://eigen.tuxfamily.org/"
+ url = "https://cran.r-project.org/src/contrib/RcppEigen_0.3.2.8.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RcppEigen"
+
+ version('0.3.2.8.1', '4146e06e4fdf7f4d08db7839069d479f')
+
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-registry/package.py b/var/spack/repos/builtin/packages/r-registry/package.py
new file mode 100644
index 0000000000..479250cac6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-registry/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRegistry(RPackage):
+ """Provides a generic infrastructure for creating and using registries."""
+
+ homepage = "https://cran.r-project.org/web/packages/registry/index.html"
+ url = "https://cran.r-project.org/src/contrib/registry_0.3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/registry"
+
+ version('0.3', '85345b334ec81eb3da6edcbb27c5f421')
diff --git a/var/spack/repos/builtin/packages/r-repr/package.py b/var/spack/repos/builtin/packages/r-repr/package.py
new file mode 100644
index 0000000000..47720327de
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-repr/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RRepr(RPackage):
+ """String and binary representations of objects for several formats and
+ mime types."""
+
+ homepage = "https://github.com/IRkernel/repr"
+ url = "https://cran.r-project.org/src/contrib/repr_0.9.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/repr"
+
+ version('0.9', 'db5ff74893063b492f684e42283070bd')
diff --git a/var/spack/repos/builtin/packages/r-reshape2/package.py b/var/spack/repos/builtin/packages/r-reshape2/package.py
new file mode 100644
index 0000000000..d27231e139
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-reshape2/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RReshape2(RPackage):
+ """Flexibly restructure and aggregate data using just two functions: melt
+ and dcast (or acast)."""
+
+ homepage = "https://github.com/hadley/reshape"
+ url = "https://cran.r-project.org/src/contrib/reshape2_1.4.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/reshape2"
+
+ version('1.4.1', '41e9dffdf5c6fa830321ac9c8ebffe00')
+
+ depends_on('r-plyr', type=('build', 'run'))
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rgooglemaps/package.py b/var/spack/repos/builtin/packages/r-rgooglemaps/package.py
new file mode 100644
index 0000000000..87672a35e2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rgooglemaps/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRgooglemaps(RPackage):
+ """This package serves two purposes: (i) Provide a comfortable R interface
+ to query the Google server for static maps, and (ii) Use the map as a
+ background image to overlay plots within R. This requires proper coordinate
+ scaling."""
+
+ homepage = "https://cran.r-project.org/package=RgoogleMaps"
+ url = "https://cran.r-project.org/src/contrib/RgoogleMaps_1.2.0.7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RgoogleMaps"
+
+ version('1.2.0.7', '2e1df804f0331b4122d841105f0c7ea5')
+
+ depends_on('r-png', type=('build', 'run'))
+ depends_on('r-rjsonio', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rinside/package.py b/var/spack/repos/builtin/packages/r-rinside/package.py
new file mode 100644
index 0000000000..d8e7c28e23
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rinside/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RRinside(RPackage):
+ """C++ classes to embed R in C++ applications The 'RInside' packages makes
+ it easier to have "R inside" your C++ application by providing a C++
+ wrapperclass providing the R interpreter. As R itself is embedded into
+ your application, a shared library build of R is required. This works on
+ Linux, OS X and even on Windows provided you use the same tools used to
+ build R itself. Numerous examples are provided in the eight subdirectories
+ of the examples/ directory of the installed package: standard, mpi (for
+ parallel computing) qt (showing how to embed 'RInside' inside a Qt GUI
+ application), wt (showing how to build a "web-application" using the Wt
+ toolkit), armadillo (for 'RInside' use with 'RcppArmadillo') and eigen (for
+ 'RInside' use with 'RcppEigen'). The example use GNUmakefile(s) with GNU
+ extensions, so a GNU make is required (and will use the GNUmakefile
+ automatically). Doxygen-generated documentation of the C++ classes is
+ available at the 'RInside' website as well."""
+
+ homepage = "http://dirk.eddelbuettel.com/code/rinside.html"
+ url = "https://cran.r-project.org/src/contrib/RInside_0.2.13.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RInside"
+
+ version('0.2.13', '2e3c35a7bd648e9bef98d0afcc02cf88')
+
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rjava/package.py b/var/spack/repos/builtin/packages/r-rjava/package.py
new file mode 100644
index 0000000000..440b93ff1f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rjava/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRjava(RPackage):
+ """Low-level interface to Java VM very much like .C/.Call and friends.
+ Allows creation of objects, calling methods and accessing fields."""
+
+ homepage = "http://www.rforge.net/rJava/"
+ url = "https://cran.r-project.org/src/contrib/rJava_0.9-8.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rJava"
+
+ version('0.9-8', '51ae0d690ceed056ebe7c4be71fc6c7a')
+
+ depends_on('jdk')
diff --git a/var/spack/repos/builtin/packages/r-rjson/package.py b/var/spack/repos/builtin/packages/r-rjson/package.py
new file mode 100644
index 0000000000..f37b574323
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rjson/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRjson(RPackage):
+ """Converts R object into JSON objects and vice-versa."""
+
+ homepage = "https://cran.r-project.org/package=rjson"
+ url = "https://cran.r-project.org/src/contrib/rjson_0.2.15.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rjson"
+
+ version('0.2.15', '87d0e29bc179c6aeaf312b138089f8e9')
diff --git a/var/spack/repos/builtin/packages/r-rjsonio/package.py b/var/spack/repos/builtin/packages/r-rjsonio/package.py
new file mode 100644
index 0000000000..4d5ffa6ddf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rjsonio/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRjsonio(RPackage):
+ """This is a package that allows conversion to and from data in Javascript
+ object notation (JSON) format. This allows R objects to be inserted into
+ Javascript/ECMAScript/ActionScript code and allows R programmers to read
+ and convert JSON content to R objects. This is an alternative to rjson
+ package. Originally, that was too slow for converting large R objects to
+ JSON and was not extensible. rjson's performance is now similar to this
+ package, and perhaps slightly faster in some cases. This package uses
+ methods and is readily extensible by defining methods for different
+ classes, vectorized operations, and C code and callbacks to R functions for
+ deserializing JSON objects to R. The two packages intentionally share the
+ same basic interface. This package (RJSONIO) has many additional options to
+ allow customizing the generation and processing of JSON content. This
+ package uses libjson rather than implementing yet another JSON parser. The
+ aim is to support other general projects by building on their work,
+ providing feedback and benefit from their ongoing development."""
+
+ homepage = "https://cran.r-project.org/package=RJSONIO"
+ url = "https://cran.r-project.org/src/contrib/RJSONIO_1.3-0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RJSONIO"
+
+ version('1.3-0', '72c395622ba8d1435ec43849fd32c830')
diff --git a/var/spack/repos/builtin/packages/r-rmarkdown/package.py b/var/spack/repos/builtin/packages/r-rmarkdown/package.py
new file mode 100644
index 0000000000..31a7695923
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rmarkdown/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RRmarkdown(RPackage):
+ """Convert R Markdown documents into a variety of formats."""
+
+ homepage = "http://rmarkdown.rstudio.com/"
+ url = "https://cran.r-project.org/src/contrib/rmarkdown_1.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rmarkdown"
+
+ version('1.0', '264aa6a59e9680109e38df8270e14c58')
+
+ depends_on('r-knitr', type=('build', 'run'))
+ depends_on('r-yaml', type=('build', 'run'))
+ depends_on('r-htmltools', type=('build', 'run'))
+ depends_on('r-catools', type=('build', 'run'))
+ depends_on('r-evaluate', type=('build', 'run'))
+ depends_on('r-base64enc', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rmysql/package.py b/var/spack/repos/builtin/packages/r-rmysql/package.py
new file mode 100644
index 0000000000..4946b071fa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rmysql/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRmysql(RPackage):
+ """Implements 'DBI' Interface to 'MySQL' and 'MariaDB' Databases."""
+
+ homepage = "https://github.com/rstats-db/rmysql"
+ url = "https://cran.r-project.org/src/contrib/RMySQL_0.10.9.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RMySQL"
+
+ version('0.10.9', '3628200a1864ac3005cfd55cc7cde17a')
+
+ depends_on('r-dbi', type=('build', 'run'))
+ depends_on('mariadb')
diff --git a/var/spack/repos/builtin/packages/r-rngtools/package.py b/var/spack/repos/builtin/packages/r-rngtools/package.py
new file mode 100644
index 0000000000..f9edc93a2c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rngtools/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRngtools(RPackage):
+ """This package contains a set of functions for working with Random Number
+ Generators (RNGs). In particular, it defines a generic S4 framework for
+ getting/setting the current RNG, or RNG data that are embedded into objects
+ for reproducibility. Notably, convenient default methods greatly facilitate
+ the way current RNG settings can be changed."""
+
+ homepage = "https://renozao.github.io/rngtools"
+ url = "https://cran.r-project.org/src/contrib/rngtools_1.2.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rngtools"
+
+ version('1.2.4', '715967f8b3af2848a76593a7c718c1cd')
+
+ depends_on('r-pkgmaker', type=('build', 'run'))
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rodbc/package.py b/var/spack/repos/builtin/packages/r-rodbc/package.py
new file mode 100644
index 0000000000..70e477bcb1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rodbc/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRodbc(RPackage):
+ """An ODBC database interface."""
+
+ homepage = "https://cran.rstudio.com/web/packages/RODBC/"
+ url = "https://cran.rstudio.com/src/contrib/RODBC_1.3-13.tar.gz"
+ list_url = "https://cran.rstudio.com/src/contrib/Archive/RODBC"
+
+ version('1.3-13', 'c52ef9139c2ed85adc53ad6effa7d68e')
+
+ depends_on('unixodbc')
diff --git a/var/spack/repos/builtin/packages/r-roxygen2/package.py b/var/spack/repos/builtin/packages/r-roxygen2/package.py
new file mode 100644
index 0000000000..1bbfeb2e87
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-roxygen2/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRoxygen2(RPackage):
+ """A 'Doxygen'-like in-source documentation system for Rd, collation, and
+ 'NAMESPACE' files."""
+
+ homepage = "https://github.com/klutometis/roxygen"
+ url = "https://cran.r-project.org/src/contrib/roxygen2_5.0.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/roxygen2"
+
+ version('5.0.1', 'df5bdbc12fda372e427710ef1cd92ed7')
+
+ depends_on('r-stringr', type=('build', 'run'))
+ depends_on('r-stringi', type=('build', 'run'))
+ depends_on('r-brew', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rpostgresql/package.py b/var/spack/repos/builtin/packages/r-rpostgresql/package.py
new file mode 100644
index 0000000000..b204c53828
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rpostgresql/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRpostgresql(RPackage):
+ """Database interface and PostgreSQL driver for R This package provides a
+ Database Interface (DBI) compliant driver for R to access PostgreSQL
+ database systems. In order to build and install this package from source,
+ PostgreSQL itself must be present your system to provide PostgreSQL
+ functionality via its libraries and header files. These files are provided
+ as postgresql-devel package under some Linux distributions. On Microsoft
+ Windows system the attached libpq library source will be used. A wiki and
+ issue tracking system for the package are available at Google Code at
+ https://code.google.com/p/rpostgresql/."""
+
+ homepage = "https://code.google.com/p/rpostgresql/"
+ url = "https://cran.r-project.org/src/contrib/RPostgreSQL_0.4-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RPostgreSQL"
+
+ version('0.4-1', 'e7b22e212afbb2cbb88bab937f93e55a')
+
+ depends_on('r-dbi', type=('build', 'run'))
+ depends_on('postgresql')
diff --git a/var/spack/repos/builtin/packages/r-rsnns/package.py b/var/spack/repos/builtin/packages/r-rsnns/package.py
new file mode 100644
index 0000000000..1a0978363a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rsnns/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RRsnns(RPackage):
+ """The Stuttgart Neural Network Simulator (SNNS) is a library containing
+ many standard implementations of neural networks. This package wraps the
+ SNNS functionality to make it available from within R. Using the RSNNS
+ low-level interface, all of the algorithmic functionality and flexibility
+ of SNNS can be accessed. Furthermore, the package contains a convenient
+ high-level interface, so that the most common neural network topologies
+ and learning algorithms integrate seamlessly into R."""
+
+ homepage = "http://sci2s.ugr.es/dicits/software/RSNNS"
+ url = "https://cran.r-project.org/src/contrib/RSNNS_0.4-7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RSNNS"
+
+ version('0.4-7', 'ade7736611c456effb5f72e0ce0a1e6f')
+
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rsqlite/package.py b/var/spack/repos/builtin/packages/r-rsqlite/package.py
new file mode 100644
index 0000000000..c08fcac20c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rsqlite/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRsqlite(RPackage):
+ """This package embeds the SQLite database engine in R and provides an
+ interface compliant with the DBI package. The source for the SQLite engine
+ (version 3.8.6) is included."""
+
+ homepage = "https://github.com/rstats-db/RSQLite"
+ url = "https://cran.r-project.org/src/contrib/RSQLite_1.0.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/RSQLite"
+
+ version('1.0.0', 'e6cbe2709612b687c13a10d30c7bad45')
+
+ depends_on('r-dbi', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rstan/package.py b/var/spack/repos/builtin/packages/r-rstan/package.py
new file mode 100644
index 0000000000..e616f0a7dd
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rstan/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRstan(RPackage):
+ """User-facing R functions are provided to parse, compile, test, estimate,
+ and analyze Stan models by accessing the header-only Stan library provided
+ by the 'StanHeaders' package. The Stan project develops a probabilistic
+ programming language that implements full Bayesian statistical inference
+ via Markov Chain Monte Carlo, rough Bayesian inference via variational
+ approximation, and (optionally penalized) maximum likelihood estimation via
+ optimization. In all three cases, automatic differentiation is used to
+ quickly and accurately evaluate gradients without burdening the user with
+ the need to derive the partial derivatives."""
+
+ homepage = "http://mc-stan.org/"
+ url = "https://cran.r-project.org/src/contrib/rstan_2.10.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rstan"
+
+ version('2.10.1', 'f5d212f6f8551bdb91fe713d05d4052a')
+
+ depends_on('r-ggplot2', type=('build', 'run'))
+ depends_on('r-stanheaders', type=('build', 'run'))
+ depends_on('r-inline', type=('build', 'run'))
+ depends_on('r-gridextra', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
+ depends_on('r-rcppeigen', type=('build', 'run'))
+ depends_on('r-bh', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-rstudioapi/package.py b/var/spack/repos/builtin/packages/r-rstudioapi/package.py
new file mode 100644
index 0000000000..2558a5c3f6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rstudioapi/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RRstudioapi(RPackage):
+ """Access the RStudio API (if available) and provide informative error
+ messages when it's not."""
+
+ homepage = "https://cran.r-project.org/web/packages/rstudioapi/index.html"
+ url = "https://cran.r-project.org/src/contrib/rstudioapi_0.5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rstudioapi"
+
+ version('0.6', 'fdb13bf46aab02421557e713fceab66b')
+ version('0.5', '6ce1191da74e7bcbf06b61339486b3ba')
diff --git a/var/spack/repos/builtin/packages/r-rzmq/package.py b/var/spack/repos/builtin/packages/r-rzmq/package.py
new file mode 100644
index 0000000000..f385a13901
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-rzmq/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RRzmq(RPackage):
+ """Interface to the ZeroMQ lightweight messaging kernel."""
+
+ homepage = "http://github.com/armstrtw/rzmq"
+ url = "https://cran.r-project.org/src/contrib/rzmq_0.7.7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/rzmq"
+
+ version('0.7.7', '8ba18fd1c222d1eb25bb622ccd2897e0')
+
+ depends_on('zeromq')
diff --git a/var/spack/repos/builtin/packages/r-sandwich/package.py b/var/spack/repos/builtin/packages/r-sandwich/package.py
new file mode 100644
index 0000000000..62bd2880e5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-sandwich/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RSandwich(RPackage):
+ """Model-robust standard error estimators for cross-sectional, time series,
+ and longitudinal data."""
+
+ homepage = "https://cran.r-project.org/package=sandwich"
+ url = "https://cran.r-project.org/src/contrib/sandwich_2.3-4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/sandwich"
+
+ version('2.3-4', 'a621dbd8a57b6e1e036496642aadc2e5')
+
+ depends_on('r-zoo', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-scales/package.py b/var/spack/repos/builtin/packages/r-scales/package.py
new file mode 100644
index 0000000000..e3832f78e0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-scales/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RScales(RPackage):
+ """Graphical scales map data to aesthetics, and provide methods for
+ automatically determining breaks and labels for axes and legends."""
+
+ homepage = "https://github.com/hadley/scales"
+ url = "https://cran.r-project.org/src/contrib/scales_0.4.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/scales"
+
+ version('0.4.0', '7b5602d9c55595901192248bca25c099')
+
+ depends_on('r-rcolorbrewer', type=('build', 'run'))
+ depends_on('r-dichromat', type=('build', 'run'))
+ depends_on('r-plyr', type=('build', 'run'))
+ depends_on('r-munsell', type=('build', 'run'))
+ depends_on('r-labeling', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-shiny/package.py b/var/spack/repos/builtin/packages/r-shiny/package.py
new file mode 100644
index 0000000000..b1d21c7e7c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-shiny/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RShiny(RPackage):
+ """Makes it incredibly easy to build interactive web applications with R.
+ Automatic "reactive" binding between inputs and outputs and extensive
+ pre-built widgets make it possible to build beautiful, responsive, and
+ powerful applications with minimal effort."""
+
+ homepage = "http://shiny.rstudio.com/"
+ url = "https://cran.r-project.org/src/contrib/shiny_0.13.2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/shiny"
+
+ version('0.13.2', 'cb5bff7a28ad59ec2883cd0912ca9611')
+
+ depends_on('r-httpuv', type=('build', 'run'))
+ depends_on('r-mime', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-xtable', type=('build', 'run'))
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-htmltools', type=('build', 'run'))
+ depends_on('r-r6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-sp/package.py b/var/spack/repos/builtin/packages/r-sp/package.py
new file mode 100644
index 0000000000..2917f0b6b6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-sp/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RSp(RPackage):
+ """Classes and methods for spatial data; the classes document where the
+ spatial location information resides, for 2D or 3D data. Utility functions
+ are provided, e.g. for plotting data as maps, spatial selection, as well as
+ methods for retrieving coordinates, for subsetting, print, summary, etc."""
+
+ homepage = "https://github.com/edzer/sp/"
+ url = "https://cran.r-project.org/src/contrib/sp_1.2-3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/sp"
+
+ version('1.2-3', 'f0e24d993dec128642ee66b6b47b10c1')
+
+ depends_on('r-lattice', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-sparsem/package.py b/var/spack/repos/builtin/packages/r-sparsem/package.py
new file mode 100644
index 0000000000..370497e395
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-sparsem/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RSparsem(RPackage):
+ """Some basic linear algebra functionality for sparse matrices is provided:
+ including Cholesky decomposition and backsolving as well as standard R
+ subsetting and Kronecker products."""
+
+ homepage = "http://www.econ.uiuc.edu/~roger/research/sparse/sparse.html"
+ url = "https://cran.r-project.org/src/contrib/SparseM_1.7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/SparseM"
+
+ version('1.7', '7b5b0ab166a0929ef6dcfe1d97643601')
diff --git a/var/spack/repos/builtin/packages/r-stanheaders/package.py b/var/spack/repos/builtin/packages/r-stanheaders/package.py
new file mode 100644
index 0000000000..322356e347
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-stanheaders/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RStanheaders(RPackage):
+ """The C++ header files of the Stan project are provided by this package,
+ but it contains no R code, vignettes, or function documentation. There is a
+ shared object containing part of the CVODES library, but it is not
+ accessible from R. StanHeaders is only useful for developers who want to
+ utilize the LinkingTo directive of their package's DESCRIPTION file to
+ build on the Stan library without incurring unnecessary dependencies. The
+ Stan project develops a probabilistic programming language that implements
+ full or approximate Bayesian statistical inference via Markov Chain Monte
+ Carlo or variational methods and implements (optionally penalized) maximum
+ likelihood estimation via optimization. The Stan library includes an
+ advanced automatic differentiation scheme, templated statistical and linear
+ algebra functions that can handle the automatically differentiable scalar
+ types (and doubles, ints, etc.), and a parser for the Stan language. The
+ 'rstan' package provides user-facing R functions to parse, compile, test,
+ estimate, and analyze Stan models."""
+
+ homepage = "http://mc-stan.org/"
+ url = "https://cran.r-project.org/src/contrib/StanHeaders_2.10.0-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/StanHeaders"
+
+ version('2.10.0-2', '9d09b1e9278f08768f7a988ad9082d57')
diff --git a/var/spack/repos/builtin/packages/r-stringi/package.py b/var/spack/repos/builtin/packages/r-stringi/package.py
new file mode 100644
index 0000000000..d89238f3d7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-stringi/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RStringi(RPackage):
+ """Allows for fast, correct, consistent, portable, as well as convenient
+ character string/text processing in every locale and any native encoding.
+ Owing to the use of the ICU library, the package provides R users with
+ platform-independent functions known to Java, Perl, Python, PHP, and Ruby
+ programmers. Among available features there are: pattern searching (e.g.,
+ with ICU Java-like regular expressions or the Unicode Collation Algorithm),
+ random string generation, case mapping, string transliteration,
+ concatenation, Unicode normalization, date-time formatting and parsing,
+ etc."""
+
+ homepage = "http://www.gagolewski.com/software/stringi/"
+ url = "https://cran.r-project.org/src/contrib/stringi_1.1.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/stringi"
+
+ version('1.1.1', '32b919ee3fa8474530c4942962a6d8d9')
+
+ depends_on('icu4c')
diff --git a/var/spack/repos/builtin/packages/r-stringr/package.py b/var/spack/repos/builtin/packages/r-stringr/package.py
new file mode 100644
index 0000000000..de8d83b500
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-stringr/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RStringr(RPackage):
+ """A consistent, simple and easy to use set of wrappers around the
+ fantastic 'stringi' package. All function and argument names (and
+ positions) are consistent, all functions deal with "NA"'s and zero length
+ vectors in the same way, and the output from one function is easy to feed
+ into the input of another."""
+
+ homepage = "https://cran.r-project.org/web/packages/stringr/index.html"
+ url = "https://cran.r-project.org/src/contrib/stringr_1.0.0.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/stringr"
+
+ version('1.0.0', '5ca977c90351f78b1b888b379114a7b4')
+
+ depends_on('r-stringi', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-survey/package.py b/var/spack/repos/builtin/packages/r-survey/package.py
new file mode 100644
index 0000000000..249cad8178
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-survey/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RSurvey(RPackage):
+ """Summary statistics, two-sample tests, rank tests, generalised linear
+ models, cumulative link models, Cox models, loglinear models, and general
+ maximum pseudolikelihood estimation for multistage stratified,
+ cluster-sampled, unequally weighted survey samples. Variances by Taylor
+ series linearisation or replicate weights. Post-stratification,
+ calibration, and raking. Two-phase subsampling designs. Graphics. PPS
+ sampling without replacement. Principal components, factor analysis."""
+
+ homepage = "http://r-survey.r-forge.r-project.org/survey/"
+ url = "https://cran.r-project.org/src/contrib/survey_3.30-3.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/survey"
+
+ version('3.30-3', 'c70cdae9cb43d35abddd11173d64cad0')
diff --git a/var/spack/repos/builtin/packages/r-survival/package.py b/var/spack/repos/builtin/packages/r-survival/package.py
new file mode 100644
index 0000000000..1df00f0c93
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-survival/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RSurvival(RPackage):
+ """Contains the core survival analysis routines, including definition of
+ Surv objects, Kaplan-Meier and Aalen-Johansen (multi-state) curves, Cox
+ models, and parametric accelerated failure time models."""
+
+ homepage = "https://cran.r-project.org/package=survival"
+ url = "https://cran.r-project.org/src/contrib/survival_2.39-5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/survival"
+
+ version('2.39-5', 'a3cc6b5762e8c5c0bb9e64a276710be2')
+
+ depends_on('r-matrix', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-tarifx/package.py b/var/spack/repos/builtin/packages/r-tarifx/package.py
new file mode 100644
index 0000000000..1fb2d35b1d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-tarifx/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RTarifx(RPackage):
+ """A collection of various utility and convenience functions."""
+
+ homepage = "https://cran.r-project.org/package=taRifx"
+ url = "https://cran.r-project.org/src/contrib/taRifx_1.0.6.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/taRifx"
+
+ version('1.0.6', '7e782e04bd69d929b29f91553382e6a2')
+
+ depends_on('r-reshape2', type=('build', 'run'))
+ depends_on('r-plyr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-testit/package.py b/var/spack/repos/builtin/packages/r-testit/package.py
new file mode 100644
index 0000000000..4d99c388e6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-testit/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RTestit(RPackage):
+ """Provides two convenience functions assert() and test_pkg() to facilitate
+ testing R packages."""
+
+ homepage = "https://github.com/yihui/testit"
+ url = "https://cran.r-project.org/src/contrib/testit_0.5.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/testit"
+
+ version('0.5', 'f206d3cbdc5174e353d2d05ba6a12e59')
diff --git a/var/spack/repos/builtin/packages/r-testthat/package.py b/var/spack/repos/builtin/packages/r-testthat/package.py
new file mode 100644
index 0000000000..62409912f7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-testthat/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RTestthat(RPackage):
+ """A unit testing system designed to be fun, flexible and easy to set
+ up."""
+
+ homepage = "https://github.com/hadley/testthat"
+ url = "https://cran.r-project.org/src/contrib/testthat_1.0.2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/testthat"
+
+ version('1.0.2', '6c6a90c8db860292df5784a70e07b8dc')
+
+ depends_on('r-digest', type=('build', 'run'))
+ depends_on('r-crayon', type=('build', 'run'))
+ depends_on('r-praise', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-r6', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-thdata/package.py b/var/spack/repos/builtin/packages/r-thdata/package.py
new file mode 100644
index 0000000000..cf2b01e6e8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-thdata/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RThdata(RPackage):
+ """Contains data sets used in other packages Torsten Hothorn maintains."""
+
+ homepage = "https://cran.r-project.org/package=TH.data"
+ url = "https://cran.r-project.org/src/contrib/TH.data_1.0-7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/TH.data"
+
+ version('1.0-7', '3e8b6b1a4699544f175215aed7039a94')
+
+ depends_on('r-survival', type=('build', 'run'))
+ depends_on('r-mass', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-threejs/package.py b/var/spack/repos/builtin/packages/r-threejs/package.py
new file mode 100644
index 0000000000..50b484dc25
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-threejs/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RThreejs(RPackage):
+ """Create interactive 3D scatter plots, network plots, and globes using the
+ 'three.js' visualization library ("http://threejs.org")."""
+
+ homepage = "http://bwlewis.github.io/rthreejs"
+ url = "https://cran.r-project.org/src/contrib/threejs_0.2.2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/threejs"
+
+ version('0.2.2', '35c179b10813c5e4bd3e7827fae6627b')
+
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-base64enc', type=('build', 'run'))
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-tibble/package.py b/var/spack/repos/builtin/packages/r-tibble/package.py
new file mode 100644
index 0000000000..39dfc3893b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-tibble/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RTibble(RPackage):
+ """Provides a 'tbl_df' class that offers better checking and printing
+ capabilities than traditional data frames."""
+
+ homepage = "https://github.com/hadley/tibble"
+ url = "https://cran.r-project.org/src/contrib/tibble_1.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/tibble"
+
+ version('1.1', '2fe9f806109d0b7fadafb1ffafea4cb8')
+
+ depends_on('r-assertthat', type=('build', 'run'))
+ depends_on('r-lazyeval', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-tidyr/package.py b/var/spack/repos/builtin/packages/r-tidyr/package.py
new file mode 100644
index 0000000000..1285e5e9ae
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-tidyr/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RTidyr(RPackage):
+ """An evolution of 'reshape2'. It's designed specifically for data tidying
+ (not general reshaping or aggregating) and works well with 'dplyr' data
+ pipelines."""
+
+ homepage = "https://github.com/hadley/tidyr"
+ url = "https://cran.r-project.org/src/contrib/tidyr_0.5.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/tidyr"
+
+ version('0.5.1', '3cadc869510c054ed93d374ab44120bd')
+
+ depends_on('r-tibble', type=('build', 'run'))
+ depends_on('r-dplyr', type=('build', 'run'))
+ depends_on('r-stringi', type=('build', 'run'))
+ depends_on('r-lazyeval', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-rcpp', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-ttr/package.py b/var/spack/repos/builtin/packages/r-ttr/package.py
new file mode 100644
index 0000000000..79429f5286
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-ttr/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RTtr(RPackage):
+ """Functions and data to construct technical trading rules with R."""
+
+ homepage = "https://github.com/joshuaulrich/TTR"
+ url = "https://cran.r-project.org/src/contrib/TTR_0.23-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/TTR"
+
+ version('0.23-1', '35f693ac0d97e8ec742ebea2da222986')
+
+ depends_on('r-xts', type=('build', 'run'))
+ depends_on('r-zoo', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-uuid/package.py b/var/spack/repos/builtin/packages/r-uuid/package.py
new file mode 100644
index 0000000000..b9dcc12629
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-uuid/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RUuid(RPackage):
+ """Tools for generating and handling of UUIDs (Universally Unique
+ Identifiers)."""
+
+ homepage = "http://www.rforge.net/uuid"
+ url = "https://cran.rstudio.com/src/contrib/uuid_0.1-2.tar.gz"
+ list_url = "https://cran.rstudio.com/src/contrib/Archive/uuid"
+
+ version('0.1-2', 'f97d000c0b16bca455fb5bf2cd668ddf')
diff --git a/var/spack/repos/builtin/packages/r-vcd/package.py b/var/spack/repos/builtin/packages/r-vcd/package.py
new file mode 100644
index 0000000000..56a2ebdfa7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-vcd/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RVcd(RPackage):
+ """Visualization techniques, data sets, summary and inference procedures
+ aimed particularly at categorical data. Special emphasis is given to highly
+ extensible grid graphics. The package was originally inspired
+ by the book "Visualizing Categorical Data" by Michael Friendly and is now
+ the main support package for a new book, "Discrete Data Analysis with R" by
+ Michael Friendly and David Meyer (2015)."""
+
+ homepage = "https://cran.r-project.org/package=vcd"
+ url = "https://cran.r-project.org/src/contrib/vcd_1.4-1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/vcd"
+
+ version('1.4-1', '7db150a77f173f85b69a1f86f73f8f02')
+
+ depends_on('r-mass', type=('build', 'run'))
+ depends_on('r-colorspace', type=('build', 'run'))
+ depends_on('r-lmtest', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-visnetwork/package.py b/var/spack/repos/builtin/packages/r-visnetwork/package.py
new file mode 100644
index 0000000000..ea0b972bf1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-visnetwork/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RVisnetwork(RPackage):
+ """Provides an R interface to the 'vis.js' JavaScript charting library. It
+ allows interactive visualization of networks."""
+
+ homepage = "https://github.com/datastorm-open/visNetwork"
+ url = "https://cran.r-project.org/src/contrib/visNetwork_1.0.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/visNetwork"
+
+ version('1.0.1', 'dfc9664a5165134d8dbdcd949ad73cf7')
+
+ depends_on('r-htmlwidgets', type=('build', 'run'))
+ depends_on('r-htmltools', type=('build', 'run'))
+ depends_on('r-jsonlite', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-whisker/package.py b/var/spack/repos/builtin/packages/r-whisker/package.py
new file mode 100644
index 0000000000..17f904f5c3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-whisker/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RWhisker(RPackage):
+ """logicless templating, reuse templates in many programming languages
+ including R"""
+
+ homepage = "http://github.com/edwindj/whisker"
+ url = "https://cran.r-project.org/src/contrib/whisker_0.3-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/whisker"
+
+ version('0.3-2', 'c4b9bf9a22e69ce003fe68663ab5e8e6')
diff --git a/var/spack/repos/builtin/packages/r-withr/package.py b/var/spack/repos/builtin/packages/r-withr/package.py
new file mode 100644
index 0000000000..785050ed87
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-withr/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RWithr(RPackage):
+ """A set of functions to run code 'with' safely and temporarily modified
+ global state. Many of these functions were originally a part of the
+ 'devtools' package, this provides a simple package with limited
+ dependencies to provide access to these functions."""
+
+ homepage = "http://github.com/jimhester/withr"
+ url = "https://cran.r-project.org/src/contrib/withr_1.0.1.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/withr"
+
+ version('1.0.1', 'ac38af2c6f74027c9592dd8f0acb7598')
diff --git a/var/spack/repos/builtin/packages/r-xgboost/package.py b/var/spack/repos/builtin/packages/r-xgboost/package.py
new file mode 100644
index 0000000000..766191dcc1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xgboost/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class RXgboost(RPackage):
+ """Extreme Gradient Boosting, which is an efficient implementation of
+ gradient boosting framework. This package is its R interface. The package
+ includes efficient linear model solver and tree learning algorithms. The
+ package can automatically do parallel computation on a single machine which
+ could be more than 10 times faster than existing gradient boosting
+ packages. It supports various objective functions, including regression,
+ classification and ranking. The package is made to be extensible, so that
+ users are also allowed to define their own objectives easily."""
+
+ homepage = "https://github.com/dmlc/xgboost"
+ url = "https://cran.r-project.org/src/contrib/xgboost_0.4-4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/xgboost"
+
+ version('0.4-4', 'c24d3076058101a71de4b8af8806697c')
+
+ depends_on('r-matrix', type=('build', 'run'))
+ depends_on('r-datatable', type=('build', 'run'))
+ depends_on('r-magrittr', type=('build', 'run'))
+ depends_on('r-stringr', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-xlconnect/package.py b/var/spack/repos/builtin/packages/r-xlconnect/package.py
new file mode 100644
index 0000000000..1863997ad7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xlconnect/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXlconnect(RPackage):
+ """Provides comprehensive functionality to read, write and format Excel
+ data."""
+
+ homepage = "http://miraisolutions.wordpress.com/"
+ url = "https://cran.r-project.org/src/contrib/XLConnect_0.2-11.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/XLConnect"
+
+ version('0.2-12', '3340d05d259f0a41262eab4ed32617ad')
+ version('0.2-11', '9d1769a103cda05665df399cc335017d')
+
+ depends_on('r-xlconnectjars', type=('build', 'run'))
+ depends_on('r-rjava', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-xlconnectjars/package.py b/var/spack/repos/builtin/packages/r-xlconnectjars/package.py
new file mode 100644
index 0000000000..0200b00a0f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xlconnectjars/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXlconnectjars(RPackage):
+ """Provides external JAR dependencies for the XLConnect package."""
+
+ homepage = "http://miraisolutions.wordpress.com/"
+ url = "https://cran.r-project.org/src/contrib/XLConnectJars_0.2-9.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/XLConnectJars"
+
+ version('0.2-12', '6984e5140cd1c887c017ef6f88cbba81')
+ version('0.2-9', 'e6d6b1acfede26acaa616ee421bd30fb')
+
+ depends_on('r-rjava', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-xlsx/package.py b/var/spack/repos/builtin/packages/r-xlsx/package.py
new file mode 100644
index 0000000000..e16a582306
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xlsx/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXlsx(RPackage):
+ """Provide R functions to read/write/format Excel 2007 and Excel
+ 97/2000/XP/2003 file formats."""
+
+ homepage = "http://code.google.com/p/rexcel/"
+ url = "https://cran.rstudio.com/src/contrib/xlsx_0.5.7.tar.gz"
+ list_url = "https://cran.rstudio.com/src/contrib/Archive/xlsx"
+
+ version('0.5.7', '36b1b16f29c54b6089b1dae923180dd5')
+
+ depends_on('r-rjava', type=('build', 'run'))
+ depends_on('r-xlsxjars', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-xlsxjars/package.py b/var/spack/repos/builtin/packages/r-xlsxjars/package.py
new file mode 100644
index 0000000000..1c16c75a9d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xlsxjars/package.py
@@ -0,0 +1,38 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXlsxjars(RPackage):
+ """The xlsxjars package collects all the external jars required for the
+ xlsx package. This release corresponds to POI 3.10.1."""
+
+ homepage = "https://cran.rstudio.com/web/packages/xlsxjars/index.html"
+ url = "https://cran.rstudio.com/src/contrib/xlsxjars_0.6.1.tar.gz"
+ list_url = "https://cran.rstudio.com/src/contrib/Archive/xlsxjars"
+
+ version('0.6.1', '5a1721d5733cb42f3a29e3f353e39166')
+
+ depends_on('r-rjava', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-xml/package.py b/var/spack/repos/builtin/packages/r-xml/package.py
new file mode 100644
index 0000000000..2fe2a8a05b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xml/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXml(RPackage):
+ """Many approaches for both reading and creating XML (and HTML) documents
+ (including DTDs), both local and accessible via HTTP or FTP. Also offers
+ access to an 'XPath' "interpreter"."""
+
+ homepage = "http://www.omegahat.net/RSXML"
+ url = "https://cran.r-project.org/src/contrib/XML_3.98-1.4.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/XML"
+
+ version('3.98-1.5', 'd1cfcd56f7aec96a84ffca91aea507ee')
+ version('3.98-1.4', '1a7f3ce6f264eeb109bfa57bedb26c14')
+
+ depends_on('libxml2')
diff --git a/var/spack/repos/builtin/packages/r-xtable/package.py b/var/spack/repos/builtin/packages/r-xtable/package.py
new file mode 100644
index 0000000000..66d8687b6d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xtable/package.py
@@ -0,0 +1,35 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXtable(RPackage):
+ """Coerce data to LaTeX and HTML tables."""
+
+ homepage = "http://xtable.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/xtable_1.8-2.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/xtable"
+
+ version('1.8-2', '239e4825cd046156a67efae3aac01d86')
diff --git a/var/spack/repos/builtin/packages/r-xts/package.py b/var/spack/repos/builtin/packages/r-xts/package.py
new file mode 100644
index 0000000000..1cedec1d42
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-xts/package.py
@@ -0,0 +1,40 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RXts(RPackage):
+ """Provide for uniform handling of R's different time-based data classes by
+ extending zoo, maximizing native format information preservation and
+ allowing for user level customization and extension, while simplifying
+ cross-class interoperability."""
+
+ homepage = "http://r-forge.r-project.org/projects/xts/"
+ url = "https://cran.r-project.org/src/contrib/xts_0.9-7.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/xts"
+
+ version('0.9-7', 'a232e94aebfa654653a7d88a0503537b')
+
+ depends_on('r-zoo', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r-yaml/package.py b/var/spack/repos/builtin/packages/r-yaml/package.py
new file mode 100644
index 0000000000..c812ea8ca2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-yaml/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RYaml(RPackage):
+ """This package implements the libyaml YAML 1.1 parser and emitter
+ (http://pyyaml.org/wiki/LibYAML) for R."""
+
+ homepage = "https://cran.r-project.org/web/packages/yaml/index.html"
+ url = "https://cran.r-project.org/src/contrib/yaml_2.1.13.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/yaml"
+
+ version('2.1.13', 'f2203ea395adaff6bd09134666191d9a')
diff --git a/var/spack/repos/builtin/packages/r-zoo/package.py b/var/spack/repos/builtin/packages/r-zoo/package.py
new file mode 100644
index 0000000000..230c78a61a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r-zoo/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class RZoo(RPackage):
+ """An S3 class with methods for totally ordered indexed observations. It is
+ particularly aimed at irregular time series of numeric vectors/matrices and
+ factors. zoo's key design goals are independence of a particular
+ index/date/time class and consistency with ts and base R by providing
+ methods to extend standard generics."""
+
+ homepage = "http://zoo.r-forge.r-project.org/"
+ url = "https://cran.r-project.org/src/contrib/zoo_1.7-13.tar.gz"
+ list_url = "https://cran.r-project.org/src/contrib/Archive/zoo"
+
+ version('1.7-13', '99521dfa4c668e692720cefcc5a1bf30')
+
+ depends_on('r-lattice', type=('build', 'run'))
diff --git a/var/spack/repos/builtin/packages/r/package.py b/var/spack/repos/builtin/packages/r/package.py
new file mode 100644
index 0000000000..3fed62d1fa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/r/package.py
@@ -0,0 +1,185 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+from spack.util.environment import *
+import shutil
+
+
+class R(Package):
+ """R is 'GNU S', a freely available language and environment for
+ statistical computing and graphics which provides a wide variety of
+ statistical and graphical techniques: linear and nonlinear modelling,
+ statistical tests, time series analysis, classification, clustering, etc.
+ Please consult the R project homepage for further information."""
+
+ homepage = "https://www.r-project.org"
+ url = "http://cran.cnr.berkeley.edu/src/base/R-3/R-3.1.2.tar.gz"
+
+ extendable = True
+
+ version('3.3.1', 'f50a659738b73036e2f5635adbd229c5')
+ version('3.3.0', '5a7506c8813432d1621c9725e86baf7a')
+ version('3.2.3', '1ba3dac113efab69e706902810cc2970')
+ version('3.2.2', '57cef5c2e210a5454da1979562a10e5b')
+ version('3.2.1', 'c2aac8b40f84e08e7f8c9068de9239a3')
+ version('3.2.0', '66fa17ad457d7e618191aa0f52fc402e')
+ version('3.1.3', '53a85b884925aa6b5811dfc361d73fc4')
+ version('3.1.2', '3af29ec06704cbd08d4ba8d69250ae74')
+
+ variant('external-lapack', default=False,
+ description='Links to externally installed BLAS/LAPACK')
+ variant('X', default=False,
+ description='Enable X11 support (call configure --with-x)')
+
+ # Virtual dependencies
+ depends_on('blas', when='+external-lapack')
+ depends_on('lapack', when='+external-lapack')
+
+ # Concrete dependencies
+ depends_on('readline')
+ depends_on('ncurses')
+ depends_on('icu4c')
+ depends_on('glib')
+ depends_on('zlib@:1.2.8')
+ depends_on('bzip2')
+ depends_on('libtiff')
+ depends_on('jpeg')
+ depends_on('cairo')
+ depends_on('cairo+X', when='+X')
+ depends_on('cairo~X', when='~X')
+ depends_on('pango')
+ depends_on('freetype')
+ depends_on('tcl')
+ depends_on('tk')
+ depends_on('tk+X', when='+X')
+ depends_on('tk~X', when='~X')
+ depends_on('libx11', when='+X')
+ depends_on('libxt', when='+X')
+ depends_on('curl')
+ depends_on('pcre')
+ depends_on('jdk')
+
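+ # R's etc/Makeconf records the compilers and flags R was built with;
+ # filter_compilers() below rewrites it to use the real compilers.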
+ @property
+ def etcdir(self):
+ return join_path(self.prefix, 'rlib', 'R', 'etc')
+
+ def install(self, spec, prefix):
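+ # Keep R's own tree under <prefix>/rlib and build libR as a shared
+ # library (--enable-R-shlib) so that R extensions can link against it.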
+ rlibdir = join_path(prefix, 'rlib')
+ configure_args = ['--prefix=%s' % prefix,
+ '--libdir=%s' % rlibdir,
+ '--enable-R-shlib',
+ '--enable-BLAS-shlib',
+ '--enable-R-framework=no']
+ if '+external-lapack' in spec:
+ configure_args.extend(['--with-blas', '--with-lapack'])
+
+ configure(*configure_args)
+ make()
+ make('install')
+
+ # Make a copy of Makeconf because it will be needed to properly build R
+ # extensions (r-* packages) in Spack.
+ src_makeconf = join_path(self.etcdir, 'Makeconf')
+ dst_makeconf = join_path(self.etcdir, 'Makeconf.spack')
+ shutil.copy(src_makeconf, dst_makeconf)
+
+ self.filter_compilers(spec, prefix)
+
+ def filter_compilers(self, spec, prefix):
+ """Run after install to tell the configuration files and Makefiles
+ to use the compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC and CXX set to Spack's generic
+ cc and c++. We want them to be bound to whatever compiler
+ they were built with."""
+
+ kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
+
+ filter_file(env['CC'], self.compiler.cc,
+ join_path(self.etcdir, 'Makeconf'), **kwargs)
+ filter_file(env['CXX'], self.compiler.cxx,
+ join_path(self.etcdir, 'Makeconf'), **kwargs)
+ filter_file(env['F77'], self.compiler.f77,
+ join_path(self.etcdir, 'Makeconf'), **kwargs)
+ filter_file(env['FC'], self.compiler.fc,
+ join_path(self.etcdir, 'Makeconf'), **kwargs)
+
+ # ========================================================================
+ # Set up environment to make install easy for R extensions.
+ # ========================================================================
+
+ @property
+ def r_lib_dir(self):
+ return join_path('rlib', 'R', 'library')
+
+ def setup_dependent_environment(self, spack_env, run_env, extension_spec):
+ # Set R_LIBS to include the library dir for the
+ # extension and any other R extensions it depends on.
+ r_libs_path = []
+ for d in extension_spec.traverse(
+ deptype=('build', 'run'), deptype_query='run'):
+ if d.package.extends(self.spec):
+ r_libs_path.append(join_path(d.prefix, self.r_lib_dir))
+
+ r_libs_path = ':'.join(r_libs_path)
+ spack_env.set('R_LIBS', r_libs_path)
+ spack_env.set('R_MAKEVARS_SITE',
+ join_path(self.etcdir, 'Makeconf.spack'))
+
+ # Use the number of make_jobs set in spack. The make program will
+ # determine how many jobs can actually be started.
+ spack_env.set('MAKEFLAGS', '-j{0}'.format(make_jobs))
+
+ # For run time environment set only the path for extension_spec and
+ # prepend it to R_LIBS
+ if extension_spec.package.extends(self.spec):
+ run_env.prepend_path('R_LIBS', join_path(
+ extension_spec.prefix, self.r_lib_dir))
+
+ def setup_environment(self, spack_env, run_env):
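+ # R installs its shared library and headers under rlib/R rather than
+ # the conventional lib/ and include/, so expose those paths here.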
+ run_env.prepend_path('LIBRARY_PATH',
+ join_path(self.prefix, 'rlib', 'R', 'lib'))
+ run_env.prepend_path('LD_LIBRARY_PATH',
+ join_path(self.prefix, 'rlib', 'R', 'lib'))
+ run_env.prepend_path('CPATH',
+ join_path(self.prefix, 'rlib', 'R', 'include'))
+
+ def setup_dependent_package(self, module, ext_spec):
+ """Called before R modules' install() methods. In most cases,
+ extensions will only need to have one line:
+ R('CMD', 'INSTALL', '--library={0}'.format(self.module.r_lib_dir),
+ self.stage.source_path)"""
+
+ # R extension builds can have a global R executable function
+ module.R = Executable(join_path(self.spec.prefix.bin, 'R'))
+
+ # Add a variable for the R library directory
+ module.r_lib_dir = join_path(ext_spec.prefix, self.r_lib_dir)
+
+ # Make the site packages directory for extensions, if it does not exist
+ # already.
+ if ext_spec.package.is_extension:
+ mkdirp(module.r_lib_dir)
diff --git a/var/spack/repos/builtin/packages/raja/package.py b/var/spack/repos/builtin/packages/raja/package.py
index b6300a1dfa..dccf9a581c 100644
--- a/var/spack/repos/builtin/packages/raja/package.py
+++ b/var/spack/repos/builtin/packages/raja/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Raja(Package):
"""RAJA Parallel Framework."""
homepage = "http://software.llnl.gov/RAJA/"
@@ -31,6 +32,7 @@ class Raja(Package):
version('git', git='https://github.com/LLNL/RAJA.git', branch="master")
def install(self, spec, prefix):
- cmake('.',*std_cmake_args)
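+ # Configure and build out of source in a separate build/ directory.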
+ with working_dir('build', create=True):
+ cmake('..', *std_cmake_args)
make()
make('install')
diff --git a/var/spack/repos/builtin/packages/randrproto/package.py b/var/spack/repos/builtin/packages/randrproto/package.py
new file mode 100644
index 0000000000..ecff886a3b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/randrproto/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Randrproto(Package):
+ """X Resize and Rotate Extension (RandR).
+
+ This extension defines a protocol for clients to dynamically change X
+ screens, so as to resize, rotate and reflect the root window of a screen.
+ """
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/randrproto"
+ url = "https://www.x.org/archive/individual/proto/randrproto-1.5.0.tar.gz"
+
+ version('1.5.0', '863d6ee3e0b2708f75d968470ed31eb9')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/ravel/package.py b/var/spack/repos/builtin/packages/ravel/package.py
index 763ec1b9a2..4f4f2b2e10 100644
--- a/var/spack/repos/builtin/packages/ravel/package.py
+++ b/var/spack/repos/builtin/packages/ravel/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Ravel(Package):
"""Ravel is a parallel communication trace visualization tool that
orders events according to logical time."""
@@ -33,8 +34,7 @@ class Ravel(Package):
version('1.0.0', 'b25fece58331c2adfcce76c5036485c2')
- # TODO: make this a build dependency
- depends_on('cmake@2.8.9:')
+ depends_on('cmake@2.8.9:', type='build')
depends_on('muster@1.0.1:')
depends_on('otf')
diff --git a/var/spack/repos/builtin/packages/readline/package.py b/var/spack/repos/builtin/packages/readline/package.py
index 039bf725eb..abb6ba04ce 100644
--- a/var/spack/repos/builtin/packages/readline/package.py
+++ b/var/spack/repos/builtin/packages/readline/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Readline(Package):
"""The GNU Readline library provides a set of functions for use by
applications that allow users to edit command lines as they
diff --git a/var/spack/repos/builtin/packages/recordproto/package.py b/var/spack/repos/builtin/packages/recordproto/package.py
new file mode 100644
index 0000000000..02018a76ff
--- /dev/null
+++ b/var/spack/repos/builtin/packages/recordproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Recordproto(Package):
+ """X Record Extension.
+
+ This extension defines a protocol for the recording and playback of user
+ actions in the X Window System."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/recordproto"
+ url = "https://www.x.org/archive/individual/proto/recordproto-1.14.2.tar.gz"
+
+ version('1.14.2', '868235e1e150e68916d5a316ebc4ccc4')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/rename/package.py b/var/spack/repos/builtin/packages/rename/package.py
new file mode 100644
index 0000000000..3538fd21cc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rename/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Rename(Package):
+ """Perl-powered file rename script with many helpful built-ins."""
+
+ homepage = "http://plasmasturm.org/code/rename"
+ url = "https://github.com/ap/rename/archive/v1.600.tar.gz"
+
+ version('1.600', '91beb555c93d407420b5dad191069bb3')
+
+ depends_on('perl', type=('build', 'run'))
+
+ def install(self, spec, prefix):
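+ # Generate the man page from the POD documentation embedded in the
+ # rename script, then install the script and its man page by hand.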
+ Executable('pod2man')('rename', 'rename.1')
+ bdir = join_path(prefix, 'bin')
+ mkdirp(bdir)
+ install('rename', bdir)
+ mdir = join_path(prefix, 'share', 'man', 'man1')
+ mkdirp(mdir)
+ install('rename.1', mdir)
diff --git a/var/spack/repos/builtin/packages/rendercheck/package.py b/var/spack/repos/builtin/packages/rendercheck/package.py
new file mode 100644
index 0000000000..07cc809e9a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rendercheck/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Rendercheck(Package):
+ """rendercheck is a program to test a Render extension implementation
+ against separate calculations of expected output."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/rendercheck"
+ url = "https://www.x.org/archive/individual/app/rendercheck-1.5.tar.gz"
+
+ version('1.5', '92ddef6d01f02529521af103f9b9bf60')
+
+ depends_on('libxrender')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/renderproto/package.py b/var/spack/repos/builtin/packages/renderproto/package.py
new file mode 100644
index 0000000000..10be4c941c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/renderproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Renderproto(Package):
+ """X Rendering Extension.
+
+ This extension defines the protocol for digital image composition, the
+ foundation of a new rendering model within the X Window System."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/renderproto"
+ url = "https://www.x.org/archive/individual/proto/renderproto-0.11.1.tar.gz"
+
+ version('0.11.1', '9b103359123e375bb7760f7dbae3dece')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/resourceproto/package.py b/var/spack/repos/builtin/packages/resourceproto/package.py
new file mode 100644
index 0000000000..4e0a495d83
--- /dev/null
+++ b/var/spack/repos/builtin/packages/resourceproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Resourceproto(Package):
+ """X Resource Extension.
+
+ This extension defines a protocol that allows a client to query the
+ X server about its usage of various resources."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/resourceproto"
+ url = "https://www.x.org/archive/individual/proto/resourceproto-1.2.0.tar.gz"
+
+ version('1.2.0', '33091d5358ec32dd7562a1aa225a70aa')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/rgb/package.py b/var/spack/repos/builtin/packages/rgb/package.py
new file mode 100644
index 0000000000..ddc5419305
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rgb/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Rgb(Package):
+ """X color name database.
+
+ This package includes both the list mapping X color names to RGB values
+ (rgb.txt) and, if configured to use a database for color lookup, the
+ rgb program to convert the text file into the binary database format.
+
+ The "others" subdirectory contains some alternate color databases."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/rgb"
+ url = "https://www.x.org/archive/individual/app/rgb-1.0.6.tar.gz"
+
+ version('1.0.6', '9759d058108f39066bbdf1d5d6de048c')
+
+ depends_on('xorg-server')
+
+ depends_on('xproto', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/root/math_uint.patch b/var/spack/repos/builtin/packages/root/math_uint.patch
new file mode 100644
index 0000000000..ff4a38b91c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/root/math_uint.patch
@@ -0,0 +1,115 @@
+From 5f3faffdd869bce5e254ae70f69290e4651a061d Mon Sep 17 00:00:00 2001
+From: Lorenzo Moneta <Lorenzo.Moneta@cern.ch>
+Date: Mon, 4 Jan 2016 15:38:23 +0100
+Subject: [PATCH] Fix ROOT-7886. Use unsigned int instead of uint
+
+---
+ math/mathcore/inc/Math/Delaunay2D.h | 2 +-
+ math/mathcore/src/Delaunay2D.cxx | 30 +++++++++++++++---------------
+ 2 files changed, 16 insertions(+), 16 deletions(-)
+
+diff --git a/math/mathcore/inc/Math/Delaunay2D.h b/math/mathcore/inc/Math/Delaunay2D.h
+index 6255e78..472bded 100644
+--- a/math/mathcore/inc/Math/Delaunay2D.h
++++ b/math/mathcore/inc/Math/Delaunay2D.h
+@@ -273,7 +273,7 @@ class Delaunay2D {
+ double fYCellStep; //! inverse denominator to calculate X cell = fNCells / (fYNmax - fYNmin)
+ std::set<UInt_t> fCells[(fNCells+1)*(fNCells+1)]; //! grid cells with containing triangles
+
+- inline unsigned int Cell(uint x, uint y) const {
++ inline unsigned int Cell(UInt_t x, UInt_t y) const {
+ return x*(fNCells+1) + y;
+ }
+
+diff --git a/math/mathcore/src/Delaunay2D.cxx b/math/mathcore/src/Delaunay2D.cxx
+index c4d4680..9a82858 100644
+--- a/math/mathcore/src/Delaunay2D.cxx
++++ b/math/mathcore/src/Delaunay2D.cxx
+@@ -190,7 +190,7 @@ void Delaunay2D::DoFindTriangles() {
+
+ Triangle tri;
+
+- auto transform = [&] (const uint i) {
++ auto transform = [&] (const unsigned int i) {
+ tri.x[i] = face.vertex(i)->point().x();
+ tri.y[i] = face.vertex(i)->point().y();
+ tri.idx[i] = face.vertex(i)->info();
+@@ -326,7 +326,7 @@ void Delaunay2D::DoFindTriangles() {
+ for(int t = 0; t < out.numberoftriangles; ++t){
+ Triangle tri;
+
+- auto transform = [&] (const uint v) {
++ auto transform = [&] (const unsigned int v) {
+ //each triangle as numberofcorners vertices ( = 3)
+ tri.idx[v] = out.trianglelist[t*out.numberofcorners + v];
+
+@@ -354,14 +354,14 @@ void Delaunay2D::DoFindTriangles() {
+ auto bx = std::minmax({tri.x[0], tri.x[1], tri.x[2]});
+ auto by = std::minmax({tri.y[0], tri.y[1], tri.y[2]});
+
+- uint cellXmin = CellX(bx.first);
+- uint cellXmax = CellX(bx.second);
++ unsigned int cellXmin = CellX(bx.first);
++ unsigned int cellXmax = CellX(bx.second);
+
+- uint cellYmin = CellY(by.first);
+- uint cellYmax = CellY(by.second);
++ unsigned int cellYmin = CellY(by.first);
++ unsigned int cellYmax = CellY(by.second);
+
+- for(uint i = cellXmin; i <= cellXmax; ++i)
+- for(uint j = cellYmin; j <= cellYmax; ++j){
++ for(unsigned int i = cellXmin; i <= cellXmax; ++i)
++ for(unsigned int j = cellYmin; j <= cellYmax; ++j){
+ //printf("(%u,%u) = %u\n", i, j, Cell(i,j));
+ fCells[Cell(i,j)].insert(t);
+ }
+@@ -382,7 +382,7 @@ double Delaunay2D::DoInterpolateNormalized(double xx, double yy)
+ /// FindAllTriangles();
+
+ //see comment in header for CGAL fallback section
+- auto bayCoords = [&] (const uint t) -> std::tuple<double, double, double> {
++ auto bayCoords = [&] (const unsigned int t) -> std::tuple<double, double, double> {
+ double la = ( (fTriangles[t].y[1] - fTriangles[t].y[2])*(xx - fTriangles[t].x[2])
+ + (fTriangles[t].x[2] - fTriangles[t].x[1])*(yy - fTriangles[t].y[2]) ) * fTriangles[t].invDenom;
+ double lb = ( (fTriangles[t].y[2] - fTriangles[t].y[0])*(xx - fTriangles[t].x[2])
+@@ -401,7 +401,7 @@ double Delaunay2D::DoInterpolateNormalized(double xx, double yy)
+ if(cX < 0 || cX > fNCells || cY < 0 || cY > fNCells)
+ return fZout; //TODO some more fancy interpolation here
+
+- for(uint t : fCells[Cell(cX, cY)]){
++ for(unsigned int t : fCells[Cell(cX, cY)]){
+ auto coords = bayCoords(t);
+
+ if(inTriangle(coords)){
+@@ -415,7 +415,7 @@ double Delaunay2D::DoInterpolateNormalized(double xx, double yy)
+
+ //debugging
+
+- /*for(uint t = 0; t < fNdt; ++t){
++ /*for(unsigned int t = 0; t < fNdt; ++t){
+ auto coords = bayCoords(t);
+
+ if(inTriangle(coords)){
+@@ -423,17 +423,17 @@ double Delaunay2D::DoInterpolateNormalized(double xx, double yy)
+ //brute force found a triangle -> grid not
+ printf("Found triangle %u for (%f,%f) -> (%u,%u)\n", t, xx,yy, cX, cY);
+ printf("Triangles in grid cell: ");
+- for(uint x : fCells[Cell(cX, cY)])
++ for(unsigned int x : fCells[Cell(cX, cY)])
+ printf("%u ", x);
+ printf("\n");
+
+ printf("Triangle %u is in cells: ", t);
+- for(uint i = 0; i <= fNCells; ++i)
+- for(uint j = 0; j <= fNCells; ++j)
++ for(unsigned int i = 0; i <= fNCells; ++i)
++ for(unsigned int j = 0; j <= fNCells; ++j)
+ if(fCells[Cell(i,j)].count(t))
+ printf("(%u,%u) ", i, j);
+ printf("\n");
+- for(uint i = 0; i < 3; ++i)
++ for(unsigned int i = 0; i < 3; ++i)
+ printf("\tpoint %u (%u): (%f,%f) -> (%u,%u)\n", i, fTriangles[t].idx[i], fTriangles[t].x[i], fTriangles[t].y[i], CellX(fTriangles[t].x[i]), CellY(fTriangles[t].y[i]));
+
+ //we found the triangle -> interpolate using the barycentric interpolation
diff --git a/var/spack/repos/builtin/packages/root/package.py b/var/spack/repos/builtin/packages/root/package.py
new file mode 100644
index 0000000000..0f66dcebaa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/root/package.py
@@ -0,0 +1,89 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+import sys
+
+
+class Root(Package):
+ """ROOT is a data analysis framework."""
+ homepage = "https://root.cern.ch"
+ url = "https://root.cern.ch/download/root_v6.07.02.source.tar.gz"
+
+ version('6.06.06', '4308449892210c8d36e36924261fea26')
+ version('6.06.04', '55a2f98dd4cea79c9c4e32407c2d6d17')
+ version('6.06.02', 'e9b8b86838f65b0a78d8d02c66c2ec55')
+
+ if sys.platform == 'darwin':
+ patch('math_uint.patch', when='@6.06.02')
+ patch('root6-60606-mathmore.patch', when='@6.06.06')
+
+ variant('graphviz', default=False, description='Enable graphviz support')
+
+ depends_on("cmake", type='build')
+ depends_on("pcre")
+ depends_on("fftw")
+ depends_on("graphviz", when="+graphviz")
+ depends_on("python")
+ depends_on("gsl")
+ depends_on("libxml2+python")
+ depends_on("jpeg")
+ if sys.platform != 'darwin':
+ depends_on("libpng")
+ depends_on("openssl")
+ depends_on("freetype")
+
+ def install(self, spec, prefix):
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+ options = [source_directory]
+ if '+debug' in spec:
+ options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
+ else:
+ options.append('-DCMAKE_BUILD_TYPE:STRING=Release')
+ options.append('-Dcxx14=on')
+ options.append('-Dcocoa=off')
+ options.append('-Dbonjour=off')
+ options.append('-Dx11=on')
+ options.extend(std_cmake_args)
+ if sys.platform == 'darwin':
+ darwin_options = [
+ '-Dcastor=OFF',
+ '-Drfio=OFF',
+ '-Ddcache=OFF']
+ options.extend(darwin_options)
+ with working_dir(build_directory, create=True):
+ cmake(*options)
+ make()
+ make("install")
+
+ def setup_dependent_environment(self, spack_env, run_env, dspec):
+ spack_env.set('ROOTSYS', self.prefix)
+ spack_env.set('ROOT_VERSION', 'v6')
+ spack_env.prepend_path('PYTHONPATH', self.prefix.lib)
+
+ def url_for_version(self, version):
+ """Handle ROOT's unusual version string."""
+ return "https://root.cern.ch/download/root_v%s.source.tar.gz" % version
diff --git a/var/spack/repos/builtin/packages/root/root6-60606-mathmore.patch b/var/spack/repos/builtin/packages/root/root6-60606-mathmore.patch
new file mode 100644
index 0000000000..d009a5af0d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/root/root6-60606-mathmore.patch
@@ -0,0 +1,29 @@
+diff --git a/math/mathmore/inc/Math/QuantFuncMathMore.h b/math/mathmore/inc/Math/QuantFuncMathMore.h
+index fd6679c..03ccc03 100644
+--- a/math/mathmore/inc/Math/QuantFuncMathMore.h
++++ b/math/mathmore/inc/Math/QuantFuncMathMore.h
+@@ -25,17 +25,6 @@
+ **********************************************************************/
+
+
+-#if defined(__CINT__) && !defined(__MAKECINT__)
+-// avoid to include header file when using CINT
+-#ifndef _WIN32
+-#include "../lib/libMathMore.so"
+-#else
+-#include "../bin/libMathMore.dll"
+-#endif
+-
+-#else
+-
+-
+ #ifndef ROOT_Math_QuantFuncMathMore
+ #define ROOT_Math_QuantFuncMathMore
+
+@@ -190,5 +179,3 @@ namespace MathMore {
+
+
+ #endif // ROOT_Math_QuantFuncMathMore
+-
+-#endif // if defined (__CINT__) && !defined(__MAKECINT__)
+
diff --git a/var/spack/repos/builtin/packages/rose/package.py b/var/spack/repos/builtin/packages/rose/package.py
index b8a65e8574..02b09f0126 100644
--- a/var/spack/repos/builtin/packages/rose/package.py
+++ b/var/spack/repos/builtin/packages/rose/package.py
@@ -22,12 +22,13 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-#------------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
# Author: Justin Too <too1@llnl.gov>
-#------------------------------------------------------------------------------
+# -----------------------------------------------------------------------------
from spack import *
+
class Rose(Package):
"""A compiler infrastructure to build source-to-source program
transformation and analysis tools.
@@ -36,13 +37,14 @@ class Rose(Package):
homepage = "http://rosecompiler.org/"
url = "https://github.com/rose-compiler/edg4x-rose"
- version('master', branch='master', git='https://github.com/rose-compiler/edg4x-rose.git')
+ version('master', branch='master',
+ git='https://github.com/rose-compiler/edg4x-rose.git')
patch('add_spack_compiler_recognition.patch')
- depends_on("autoconf@2.69")
- depends_on("automake@1.14")
- depends_on("libtool@2.4")
+ depends_on("autoconf@2.69", type='build')
+ depends_on("automake@1.14", type='build')
+ depends_on("libtool@2.4", type='build')
depends_on("boost@1.54.0")
depends_on("jdk@8u25-linux-x64")
@@ -60,4 +62,3 @@ class Rose(Package):
"--with-boost=" + boost.prefix,
"--disable-boost-version-check")
make("install-core")
-
diff --git a/var/spack/repos/builtin/packages/rstart/package.py b/var/spack/repos/builtin/packages/rstart/package.py
new file mode 100644
index 0000000000..7b80e88ae7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rstart/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Rstart(Package):
+ """This package includes both the client and server sides implementing
+ the protocol described in the "A Flexible Remote Execution Protocol
+ Based on rsh" paper found in the specs/ subdirectory.
+
+ This software has been deprecated in favor of the X11 forwarding
+ provided in common ssh implementations."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/rstart"
+ url = "https://www.x.org/archive/individual/app/rstart-1.0.5.tar.gz"
+
+ version('1.0.5', '32db3625cb5e841e17d6bc696f21edfb')
+
+ depends_on('xproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/rsync/package.py b/var/spack/repos/builtin/packages/rsync/package.py
index a9f8d4cfda..4e741b255f 100644
--- a/var/spack/repos/builtin/packages/rsync/package.py
+++ b/var/spack/repos/builtin/packages/rsync/package.py
@@ -24,8 +24,9 @@
##############################################################################
from spack import *
+
class Rsync(Package):
- """rsync is an open source utility that provides fast incremental file transfer."""
+ """An open source utility that provides fast incremental file transfer."""
homepage = "https://rsync.samba.org"
url = "https://download.samba.org/pub/rsync/rsync-3.1.1.tar.gz"
diff --git a/var/spack/repos/builtin/packages/ruby/package.py b/var/spack/repos/builtin/packages/ruby/package.py
index dd71913fc9..8dc314c171 100644
--- a/var/spack/repos/builtin/packages/ruby/package.py
+++ b/var/spack/repos/builtin/packages/ruby/package.py
@@ -35,9 +35,20 @@ class Ruby(Package):
extendable = True
version('2.2.0', 'cd03b28fd0b555970f5c4fd481700852')
+ depends_on('libffi')
+ depends_on('zlib')
+ variant('openssl', default=False, description="Enable OpenSSL support")
+ depends_on('openssl', when='+openssl')
+ variant('readline', default=False, description="Enable Readline support")
+ depends_on('readline', when='+readline')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ options = ["--prefix=%s" % prefix]
+ if '+openssl' in spec:
+ options.append("--with-openssl-dir=%s" % spec['openssl'].prefix)
+ if '+readline' in spec:
+ options.append("--with-readline-dir=%s" % spec['readline'].prefix)
+ configure(*options)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/rust-bindgen/package.py b/var/spack/repos/builtin/packages/rust-bindgen/package.py
new file mode 100644
index 0000000000..c411bc15d1
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rust-bindgen/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class RustBindgen(Package):
+ """The rust programming language toolchain"""
+ homepage = "http://www.rust-lang.org"
+ url = "https://github.com/crabtw/rust-bindgen"
+
+ version('0.16', tag='0.16', git='https://github.com/crabtw/rust-bindgen')
+
+ extends("rust")
+ depends_on("llvm")
+
+ def install(self, spec, prefix):
+ env = dict(os.environ)
+ env['LIBCLANG_PATH'] = os.path.join(spec['llvm'].prefix, 'lib')
+ cargo('install', '--root', prefix, env=env)
diff --git a/var/spack/repos/builtin/packages/rust/package.py b/var/spack/repos/builtin/packages/rust/package.py
new file mode 100644
index 0000000000..8a92fca634
--- /dev/null
+++ b/var/spack/repos/builtin/packages/rust/package.py
@@ -0,0 +1,88 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+def get_submodules():
+ git = which('git')
+ git('submodule', 'update', '--init', '--recursive')
+
+
+class Rust(Package):
+ """The rust programming language toolchain"""
+ homepage = "http://www.rust-lang.org"
+ url = "https://github.com/rust-lang/rust"
+
+ version('1.8.0', tag='1.8.0', git="https://github.com/rust-lang/rust")
+
+ resource(name='cargo',
+ git="https://github.com/rust-lang/cargo.git",
+ tag='0.10.0',
+ destination='cargo')
+
+ extendable = True
+
+ # Rust
+ depends_on("llvm")
+ depends_on("curl")
+ depends_on("git")
+ depends_on("cmake")
+ depends_on("python@:2.8")
+
+ # Cargo
+ depends_on("openssl")
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix,
+ '--llvm-root=' + spec['llvm'].prefix)
+
+ make()
+ make("install")
+
+ # Install cargo, rust package manager
+ with working_dir(os.path.join('cargo', 'cargo')):
+ get_submodules()
+ configure('--prefix=' + prefix,
+ '--local-rust-root=' + prefix)
+
+ make()
+ make("install")
+
+ def setup_dependent_package(self, module, ext_spec):
+ """
+ Called before Rust extensions' install() methods.
+
+ In most cases, extensions will only need to have one or two lines::
+
+ cargo('build')
+ cargo('install', '--root', prefix)
+
+ or
+
+ cargo('install', '--root', prefix)
+ """
+ # Rust extension builds can have a global cargo executable function
+ module.cargo = Executable(join_path(self.spec.prefix.bin, 'cargo'))
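A minimal sketch of what an extension package can do with the module-level `cargo` executable exported above (hypothetical package body, mirroring the docstring example):

    # Sketch of a Rust extension's install step; `cargo` is injected by
    # Rust.setup_dependent_package at build time
    def install(self, spec, prefix):
        cargo('install', '--root', prefix)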
diff --git a/var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch b/var/spack/repos/builtin/packages/samrai/no-tool-build.patch
index 1adf0cf721..1adf0cf721 100644
--- a/var/spack/repos/builtin/packages/SAMRAI/no-tool-build.patch
+++ b/var/spack/repos/builtin/packages/samrai/no-tool-build.patch
diff --git a/var/spack/repos/builtin/packages/SAMRAI/package.py b/var/spack/repos/builtin/packages/samrai/package.py
index 73c51ced23..e0648290d6 100644
--- a/var/spack/repos/builtin/packages/SAMRAI/package.py
+++ b/var/spack/repos/builtin/packages/samrai/package.py
@@ -24,12 +24,14 @@
##############################################################################
from spack import *
+
class Samrai(Package):
"""SAMRAI (Structured Adaptive Mesh Refinement Application Infrastructure)
- is an object-oriented C++ software library enables exploration of numerical,
- algorithmic, parallel computing, and software issues associated with applying
- structured adaptive mesh refinement (SAMR) technology in large-scale parallel
- application development.
+ is an object-oriented C++ software library that enables exploration of
+ numerical, algorithmic, parallel computing, and software issues
+ associated with applying structured adaptive mesh refinement
+ (SAMR) technology in large-scale parallel application development.
+
"""
homepage = "https://computation.llnl.gov/project/SAMRAI/"
url = "https://computation.llnl.gov/project/SAMRAI/download/SAMRAI-v3.9.1.tar.gz"
diff --git a/var/spack/repos/builtin/packages/samtools/package.py b/var/spack/repos/builtin/packages/samtools/package.py
index f5c7f4431f..b82a7e55d6 100644
--- a/var/spack/repos/builtin/packages/samtools/package.py
+++ b/var/spack/repos/builtin/packages/samtools/package.py
@@ -24,19 +24,28 @@
##############################################################################
from spack import *
+
class Samtools(Package):
- """SAM Tools provide various utilities for manipulating alignments in the SAM format,
- including sorting, merging, indexing and generating
+ """SAM Tools provide various utilities for manipulating alignments in
+ the SAM format, including sorting, merging, indexing and generating
alignments in a per-position format"""
homepage = "www.htslib.org"
- version('1.2','988ec4c3058a6ceda36503eebecd4122',url = "https://github.com/samtools/samtools/releases/download/1.2/samtools-1.2.tar.bz2")
+ url = "https://github.com/samtools/samtools/releases/download/1.3.1/samtools-1.3.1.tar.bz2"
- depends_on("zlib")
- depends_on("mpc")
- parallel=False
- patch("samtools1.2.patch",level=0)
+ version('1.3.1', 'a7471aa5a1eb7fc9cc4c6491d73c2d88')
+ version('1.2', '988ec4c3058a6ceda36503eebecd4122')
- def install(self, spec, prefix):
- make("prefix=%s" % prefix, "install")
+ depends_on("ncurses")
+ depends_on("htslib", when='@1.3.1:') # htslib became standalone
+ depends_on('zlib', when='@1.2') # needed for builtin htslib
+ def install(self, spec, prefix):
+ if self.spec.version >= Version('1.3.1'):
+ configure('--prefix={0}'.format(prefix), '--with-ncurses',
+ 'CURSES_LIB=-lncurses')
+ make()
+ make('install')
+ else:
+ make("prefix=%s" % prefix)
+ make("prefix=%s" % prefix, "install")
diff --git a/var/spack/repos/builtin/packages/samtools/samtools1.2.patch b/var/spack/repos/builtin/packages/samtools/samtools1.2.patch
deleted file mode 100644
index ead3ab4e2c..0000000000
--- a/var/spack/repos/builtin/packages/samtools/samtools1.2.patch
+++ /dev/null
@@ -1,20 +0,0 @@
---- Makefile 2015-02-03 08:27:34.000000000 -0800
-+++ Makefile.new 2015-07-21 10:38:27.881406892 -0700
-@@ -26,7 +26,7 @@
- CFLAGS = -g -Wall -O2
- LDFLAGS =
- LDLIBS =
--DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=1
-+DFLAGS= -D_FILE_OFFSET_BITS=64 -D_LARGEFILE64_SOURCE -D_CURSES_LIB=0
- LOBJS= bam_aux.o bam.o bam_import.o sam.o \
- sam_header.o bam_plbuf.o
- AOBJS= bam_index.o bam_plcmd.o sam_view.o \
-@@ -37,7 +37,7 @@
- faidx.o stats.o stats_isize.o bam_flags.o bam_split.o \
- bam_tview.o bam_tview_curses.o bam_tview_html.o bam_lpileup.o
- INCLUDES= -I. -I$(HTSDIR)
--LIBCURSES= -lcurses # -lXCurses
-+#LIBCURSES= -lcurses # -lXCurses
-
- prefix = /usr/local
- exec_prefix = $(prefix)
diff --git a/var/spack/repos/builtin/packages/sbt/package.py b/var/spack/repos/builtin/packages/sbt/package.py
new file mode 100644
index 0000000000..977939c9df
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sbt/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import shutil
+
+
+class Sbt(Package):
+ """Scala Build Tool"""
+
+ homepage = "http://www.scala-sbt.org"
+ url = "https://dl.bintray.com/sbt/native-packages/sbt/0.13.12/sbt-0.13.12.tgz"
+
+ version('0.13.12', 'cec3071d46ef13334c8097cc3467ff28')
+
+ depends_on('jdk')
+
+ def install(self, spec, prefix):
+ shutil.copytree('bin', join_path(prefix, 'bin'), symlinks=True)
+ shutil.copytree('conf', join_path(prefix, 'conf'), symlinks=True)
diff --git a/var/spack/repos/builtin/packages/scalasca/package.py b/var/spack/repos/builtin/packages/scalasca/package.py
index 98e43ee75a..228d814aed 100644
--- a/var/spack/repos/builtin/packages/scalasca/package.py
+++ b/var/spack/repos/builtin/packages/scalasca/package.py
@@ -27,10 +27,12 @@ from spack import *
class Scalasca(Package):
- """
- Scalasca is a software tool that supports the performance optimization of parallel programs by measuring and
- analyzing their runtime behavior. The analysis identifies potential performance bottlenecks - in particular those
- concerning communication and synchronization - and offers guidance in exploring their causes.
+ """Scalasca is a software tool that supports the performance optimization
+ of parallel programs by measuring and analyzing their runtime
+ behavior. The analysis identifies potential performance
+ bottlenecks - in particular those concerning communication and
+ synchronization - and offers guidance in exploring their causes.
+
"""
homepage = "http://www.scalasca.org"
@@ -44,7 +46,8 @@ class Scalasca(Package):
depends_on("mpi")
##########
- # Hard-code dependencies for Scalasca according to what stated in the release page
+ # Hard-code dependencies for Scalasca according to what is stated in the
+ # release page
# The OTF2 library path should be detected automatically from SCOREP
# SCALASCA 2.2.2
depends_on("scorep@1.4:", when='@2.2.2')
@@ -60,4 +63,4 @@ class Scalasca(Package):
"--enable-shared"]
configure(*configure_args)
make()
- make("install") \ No newline at end of file
+ make("install")
diff --git a/var/spack/repos/builtin/packages/scons/package.py b/var/spack/repos/builtin/packages/scons/package.py
index 40ae4176dd..54f894da6f 100644
--- a/var/spack/repos/builtin/packages/scons/package.py
+++ b/var/spack/repos/builtin/packages/scons/package.py
@@ -24,14 +24,10 @@
##############################################################################
from spack import *
-class Scons(Package):
+
+class Scons(PythonPackage):
"""SCons is a software construction tool"""
homepage = "http://scons.org"
url = "http://downloads.sourceforge.net/project/scons/scons/2.5.0/scons-2.5.0.tar.gz"
version('2.5.0', '9e00fa0df8f5ca5c5f5975b40e0ed354')
-
- extends('python')
-
- def install(self, spec, prefix):
- python('setup.py', 'install', '--prefix=%s' % prefix)
diff --git a/var/spack/repos/builtin/packages/scorep/package.py b/var/spack/repos/builtin/packages/scorep/package.py
index 633511a15a..e0f7972304 100644
--- a/var/spack/repos/builtin/packages/scorep/package.py
+++ b/var/spack/repos/builtin/packages/scorep/package.py
@@ -22,26 +22,32 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
class Scorep(Package):
- """
- The Score-P measurement infrastructure is a highly scalable and easy-to-use tool suite for profiling, event
- tracing, and online analysis of HPC applications.
+ """The Score-P measurement infrastructure is a highly scalable and
+ easy-to-use tool suite for profiling, event tracing, and online analysis
+ of HPC applications.
"""
homepage = "http://www.vi-hps.org/projects/score-p"
url = "http://www.vi-hps.org/upload/packages/scorep/scorep-1.2.3.tar.gz"
+ version('2.0.2', '8f00e79e1b5b96e511c5ebecd10b2888',
+ url='http://www.vi-hps.org/upload/packages/scorep/scorep-2.0.2.tar.gz')
version('1.4.2', '3b9a042b13bdd5836452354e6567f71e',
url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.4.2.tar.gz')
version('1.3', '9db6f957b7f51fa01377a9537867a55c',
url='http://www.vi-hps.org/upload/packages/scorep/scorep-1.3.tar.gz')
##########
- # Dependencies for SCORE-P are quite tight. See the homepage for more information.
+ # Dependencies for SCORE-P are quite tight. See the homepage for more
+ # information.
+ # SCOREP 2.0.2
+ depends_on('otf2@2.0', when='@2.0.2')
+ depends_on('opari2@2.0', when='@2.0.2')
+ depends_on('cube@4.3:4.4', when='@2.0.2')
# SCOREP 1.4.2
depends_on('otf2@1.5:1.6', when='@1.4.2')
depends_on('opari2@1.1.4', when='@1.4.2')
@@ -56,17 +62,18 @@ class Scorep(Package):
depends_on("papi")
def install(self, spec, prefix):
- configure = Executable( join_path(self.stage.source_path, 'configure') )
+ configure = Executable(join_path(self.stage.source_path, 'configure'))
with working_dir('spack-build', create=True):
- configure_args = ["--prefix=%s" % prefix,
- "--with-otf2=%s" % spec['otf2'].prefix.bin,
- "--with-opari2=%s" % spec['opari2'].prefix.bin,
- "--with-cube=%s" % spec['cube'].prefix.bin,
- "--with-papi-header=%s" % spec['papi'].prefix.include,
- "--with-papi-lib=%s" % spec['papi'].prefix.lib,
- "--enable-shared",
- "CFLAGS=-fPIC",
- "CXXFLAGS=-fPIC"]
+ configure_args = [
+ "--prefix=%s" % prefix,
+ "--with-otf2=%s" % spec['otf2'].prefix.bin,
+ "--with-opari2=%s" % spec['opari2'].prefix.bin,
+ "--with-cube=%s" % spec['cube'].prefix.bin,
+ "--with-papi-header=%s" % spec['papi'].prefix.include,
+ "--with-papi-lib=%s" % spec['papi'].prefix.lib,
+ "--enable-shared",
+ "CFLAGS=-fPIC",
+ "CXXFLAGS=-fPIC"]
configure(*configure_args)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/scotch/Makefile.esmumps b/var/spack/repos/builtin/packages/scotch/Makefile.esmumps
deleted file mode 100644
index 4bfc760197..0000000000
--- a/var/spack/repos/builtin/packages/scotch/Makefile.esmumps
+++ /dev/null
@@ -1,5 +0,0 @@
-esmumps : scotch
- (cd esmumps ; $(MAKE) scotch && $(MAKE) install)
-
-ptesmumps : ptscotch
- (cd esmumps ; $(MAKE) ptscotch && $(MAKE) ptinstall)
diff --git a/var/spack/repos/builtin/packages/scotch/package.py b/var/spack/repos/builtin/packages/scotch/package.py
index e82c3acd42..ca8e3e3fa0 100644
--- a/var/spack/repos/builtin/packages/scotch/package.py
+++ b/var/spack/repos/builtin/packages/scotch/package.py
@@ -22,72 +22,60 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+import os
from spack import *
-import os, re
+
class Scotch(Package):
"""Scotch is a software package for graph and mesh/hypergraph
partitioning, graph clustering, and sparse matrix ordering."""
homepage = "http://www.labri.fr/perso/pelegrin/scotch/"
- url = "http://gforge.inria.fr/frs/download.php/latestfile/298/scotch_6.0.3.tar.gz"
+ url = "http://gforge.inria.fr/frs/download.php/latestfile/298/scotch_6.0.3.tar.gz"
base_url = "http://gforge.inria.fr/frs/download.php/latestfile/298"
list_url = "http://gforge.inria.fr/frs/?group_id=248"
+ version('6.0.4', 'd58b825eb95e1db77efe8c6ff42d329f')
version('6.0.3', '10b0cc0f184de2de99859eafaca83cfc')
version('6.0.0', 'c50d6187462ba801f9a82133ee666e8e')
version('5.1.10b', 'f587201d6cf5cf63527182fbfba70753')
- variant('mpi', default=False, description='Activate the compilation of PT-Scotch')
- variant('compression', default=True, description='Activate the posibility to use compressed files')
- variant('esmumps', default=False, description='Activate the compilation of the lib esmumps needed by mumps')
- variant('shared', default=True, description='Build shared libraries')
-
- depends_on('flex')
- depends_on('bison')
+ variant('mpi', default=True,
+ description='Activate the compilation of parallel libraries')
+ variant('compression', default=True,
+ description='Activate the possibility to use compressed files')
+ variant('esmumps', default=False,
+ description='Activate the compilation of esmumps needed by mumps')
+ variant('shared', default=True,
+ description='Build a shared version of the library')
+ variant('metis', default=True,
+ description='Build metis and parmetis wrapper libraries')
+
+ depends_on('flex@:2.6.1', type='build')
+ depends_on('bison', type='build')
depends_on('mpi', when='+mpi')
depends_on('zlib', when='+compression')
# NOTE: Versions of Scotch up to version 6.0.0 don't include support for
# building with 'esmumps' in their default packages. In order to enable
# support for this feature, we must grab the 'esmumps' enabled archives
- # from the Scotch hosting site. These alternative archives include a strict
+ # from the Scotch hosting site. These alternative archives include a
# superset of the behavior in their default counterparts, so we choose to
# always grab these versions for older Scotch versions for simplicity.
- @when('@:6.0.0')
- def url_for_version(self, version):
- return '%s/scotch_%s_esmumps.tar.gz' % (Scotch.base_url, version)
-
- @when('@6.0.1:')
def url_for_version(self, version):
return super(Scotch, self).url_for_version(version)
- # NOTE: Several of the 'esmumps' enabled Scotch releases up to version 6.0.0
- # have broken build scripts that don't properly build 'esmumps' as a separate
- # target, so we need a patch procedure to remove 'esmumps' from existing targets
- # and to add it as a standalone target.
@when('@:6.0.0')
- def patch(self):
- makefile_path = os.path.join('src', 'Makefile')
- with open(makefile_path, 'r') as makefile:
- esmumps_enabled = any(re.search(r'^esmumps(\s*):(.*)$', line) for line in makefile.readlines())
-
- if not esmumps_enabled:
- mff = FileFilter(makefile_path)
- mff.filter(r'^.*((esmumps)|(ptesmumps)).*(install).*$', '')
-
- makefile_esmumps_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'Makefile.esmumps')
- with open(makefile_path, 'a') as makefile:
- makefile.write('\ninclude %s\n' % makefile_esmumps_path)
+ def url_for_version(self, version):
+ return '%s/scotch_%s_esmumps.tar.gz' % (Scotch.base_url, version)
- @when('@6.0.1:')
def patch(self):
- pass
+ self.configure()
- # NOTE: Configuration of Scotch is achieved by writing a 'Makefile.inc' file
- # that contains all of the configuration variables and their desired values
- # for the installation. This function writes this file based on the given
- # installation variants.
+ # NOTE: Configuration of Scotch is achieved by writing a 'Makefile.inc'
+ # file that contains all of the configuration variables and their desired
+ # values for the installation. This function writes this file based on
+ # the given installation variants.
def configure(self):
makefile_inc = []
cflags = [
@@ -96,17 +84,30 @@ class Scotch(Package):
'-DSCOTCH_DETERMINISTIC',
'-DSCOTCH_RENAME',
'-DIDXSIZE64'
- ]
+ ]
- ## Library Build Type ##
+ if self.spec.satisfies('platform=darwin'):
+ cflags.extend([
+ '-Drestrict=__restrict'
+ ])
+ # Library Build Type #
if '+shared' in self.spec:
- makefile_inc.extend([
- 'LIB = .so',
- 'CLIBFLAGS = -shared -fPIC',
- 'RANLIB = echo',
- 'AR = $(CC)',
- 'ARFLAGS = -shared $(LDFLAGS) -o'
+ if self.spec.satisfies('platform=darwin'):
+ makefile_inc.extend([
+ 'LIB = .dylib',
+ 'CLIBFLAGS = -dynamiclib -fPIC',
+ 'RANLIB = echo',
+ 'AR = $(CC)',
+ 'ARFLAGS = -dynamiclib $(LDFLAGS) -Wl,-install_name -Wl,%s/$(notdir $@) -undefined dynamic_lookup -o ' % prefix.lib # noqa
+ ])
+ else:
+ makefile_inc.extend([
+ 'LIB = .so',
+ 'CLIBFLAGS = -shared -fPIC',
+ 'RANLIB = echo',
+ 'AR = $(CC)',
+ 'ARFLAGS = -shared $(LDFLAGS) -o'
])
cflags.append('-fPIC')
else:
@@ -114,23 +115,23 @@ class Scotch(Package):
'LIB = .a',
'CLIBFLAGS = ',
'RANLIB = ranlib',
- 'AR = ar',
+ 'AR = ar',
'ARFLAGS = -ruv '
- ])
+ ])
- ## Compiler-Specific Options ##
+ # Compiler-Specific Options #
if self.compiler.name == 'gcc':
cflags.append('-Drestrict=__restrict')
elif self.compiler.name == 'intel':
cflags.append('-restrict')
+ mpicc_path = self.spec['mpi'].mpicc if '+mpi' in self.spec else 'mpicc'
makefile_inc.append('CCS = $(CC)')
- makefile_inc.append('CCP = %s' %
- (self.spec['mpi'].mpicc if '+mpi' in self.spec else 'mpicc'))
+ makefile_inc.append('CCP = %s' % mpicc_path)
makefile_inc.append('CCD = $(CCS)')
- ## Extra Features ##
+ # Extra Features #
ldflags = []
@@ -139,12 +140,18 @@ class Scotch(Package):
ldflags.append('-L%s -lz' % (self.spec['zlib'].prefix.lib))
cflags.append('-DCOMMON_PTHREAD')
- ldflags.append('-lm -lrt -pthread')
+ if self.spec.satisfies('platform=darwin'):
+ cflags.append('-DCOMMON_PTHREAD_BARRIER')
+ ldflags.append('-lm -pthread')
+ else:
+ ldflags.append('-lm -lrt -pthread')
makefile_inc.append('LDFLAGS = %s' % ' '.join(ldflags))
- ## General Features ##
+ # General Features #
+ flex_path = os.path.join(self.spec['flex'].prefix.bin, 'flex')
+ bison_path = os.path.join(self.spec['bison'].prefix.bin, 'bison')
makefile_inc.extend([
'EXE =',
'OBJ = .o',
@@ -155,30 +162,57 @@ class Scotch(Package):
'MV = mv',
'CP = cp',
'CFLAGS = %s' % ' '.join(cflags),
- 'LEX = %s -Pscotchyy -olex.yy.c' % os.path.join(self.spec['flex'].prefix.bin , 'flex'),
- 'YACC = %s -pscotchyy -y -b y' % os.path.join(self.spec['bison'].prefix.bin, 'bison'),
+ 'LEX = %s -Pscotchyy -olex.yy.c' % flex_path,
+ 'YACC = %s -pscotchyy -y -b y' % bison_path,
'prefix = %s' % self.prefix
- ])
+ ])
with working_dir('src'):
with open('Makefile.inc', 'w') as fh:
fh.write('\n'.join(makefile_inc))
def install(self, spec, prefix):
- self.configure()
-
targets = ['scotch']
if '+mpi' in self.spec:
targets.append('ptscotch')
- if '+esmumps' in self.spec:
- targets.append('esmumps')
- if '+mpi' in self.spec:
- targets.append('ptesmumps')
+ if self.spec.version >= Version('6.0.0'):
+ if '+esmumps' in self.spec:
+ targets.append('esmumps')
+ if '+mpi' in self.spec:
+ targets.append('ptesmumps')
with working_dir('src'):
for target in targets:
- make(target, parallel=(target!='ptesmumps'))
+ # It seems that building ptesmumps in parallel fails. For
+ # versions prior to 6.0.0 there is no separate target for
+ # ptesmumps; this library is built by the ptscotch target. This
+ # explains the test for the can_make_parallel variable below.
+ can_make_parallel = \
+ not (target == 'ptesmumps' or
+ (self.spec.version < Version('6.0.0') and
+ target == 'ptscotch'))
+ make(target, parallel=can_make_parallel)
+
+ lib_ext = dso_suffix if '+shared' in self.spec else 'a'
+ # It seems easier to remove metis wrappers from the folder that will be
+ # installed than to tweak the Makefiles
+ if '+metis' not in self.spec:
+ with working_dir('lib'):
+ force_remove('libscotchmetis.{0}'.format(lib_ext))
+ force_remove('libptscotchparmetis.{0}'.format(lib_ext))
+
+ with working_dir('include'):
+ force_remove('metis.h')
+ force_remove('parmetis.h')
+
+ if '~esmumps' in self.spec and self.spec.version < Version('6.0.0'):
+ with working_dir('lib'):
+ force_remove('libesmumps.{0}'.format(lib_ext))
+ force_remove('libptesmumps.{0}'.format(lib_ext))
+
+ with working_dir('include'):
+ force_remove('esmumps.h')
install_tree('bin', prefix.bin)
install_tree('lib', prefix.lib)
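For orientation, a sketch of the targets the install loop above would request for a hypothetical `scotch +mpi +esmumps @6.0.4` spec:

    # Sketch: targets built for 'scotch +mpi +esmumps @6.0.4'
    targets = ['scotch', 'ptscotch', 'esmumps', 'ptesmumps']
    # only 'ptesmumps' ends up with parallel=False in the make() calls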
diff --git a/var/spack/repos/builtin/packages/scr/package.py b/var/spack/repos/builtin/packages/scr/package.py
index b638688e7b..2b01c60b3e 100644
--- a/var/spack/repos/builtin/packages/scr/package.py
+++ b/var/spack/repos/builtin/packages/scr/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Scr(Package):
"""SCR caches checkpoint data in storage on the compute nodes of a
Linux cluster to provide a fast, scalable checkpoint/restart
@@ -34,8 +35,10 @@ class Scr(Package):
depends_on("mpi")
# depends_on("dtcmp")
- version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd', url='http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz')
- version('1.1.8', '6a0f11ad18e27fcfc00a271ff587b06e', url='https://github.com/hpc/scr/releases/download/v1.1.8/scr-1.1.8.tar.gz')
+ version('1.1-7', 'a5930e9ab27d1b7049447c2fd7734ebd',
+ url='http://downloads.sourceforge.net/project/scalablecr/releases/scr-1.1-7.tar.gz')
+ version('1.1.8', '6a0f11ad18e27fcfc00a271ff587b06e',
+ url='https://github.com/hpc/scr/releases/download/v1.1.8/scr-1.1.8.tar.gz')
def install(self, spec, prefix):
configure("--prefix=" + prefix,
diff --git a/var/spack/repos/builtin/packages/screen/package.py b/var/spack/repos/builtin/packages/screen/package.py
new file mode 100644
index 0000000000..7edfb44a4d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/screen/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Screen(Package):
+ """Screen is a full-screen window manager that multiplexes a physical
+ terminal between several processes, typically interactive shells.
+ """
+
+ homepage = "https://www.gnu.org/software/screen/"
+ url = "http://ftp.gnu.org/gnu/screen/screen-4.3.1.tar.gz"
+
+ version('4.3.1', '5bb3b0ff2674e29378c31ad3411170ad')
+ version('4.3.0', 'f76d28eadc4caaf6cdff00685ae6ad46')
+ version('4.2.1', '419a0594e2b25039239af8b90eda7d92')
+ version('4.2.0', 'e5199156a8ac863bbf92495a7638b612')
+ version('4.0.3', '8506fd205028a96c741e4037de6e3c42')
+ version('4.0.2', 'ed68ea9b43d9fba0972cb017a24940a1')
+ version('3.9.15', '0dff6fdc3fbbceabf25a43710fbfe75f')
+ version('3.9.11', '19572f92404995e7b2dea8117204dd67')
+ version('3.9.10', 'bbe271715d1dee038b3cd72d6d2f05fb')
+ version('3.9.9', '9a8b1d6c7438c64b884c4f7d7662afdc')
+ version('3.9.8', '8ddfebe32c2d45410ce89ea9779bb1cf')
+ version('3.9.4', '7de72cd18f7adcdf993ecc6764d0478a')
+ version('3.7.6', '9a353b828d79c3c143109265cae663a7')
+ version('3.7.4', 'c5ab40b068968075e41e25607dfce543')
+ version('3.7.2', '2d6db5de7fb0cf849cc5a6f94203f029')
+ version('3.7.1', '27cdd29318446561ef7c966041cbd2c9')
+
+ depends_on('ncurses')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/scripts/package.py b/var/spack/repos/builtin/packages/scripts/package.py
new file mode 100644
index 0000000000..7086cfd6fe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scripts/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Scripts(Package):
+ """Various X related scripts."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/scripts"
+ url = "https://www.x.org/archive/individual/app/scripts-1.0.1.tar.gz"
+
+ version('1.0.1', '1e8294a126a2a7556b21025a8d933e8b')
+
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/scrnsaverproto/package.py b/var/spack/repos/builtin/packages/scrnsaverproto/package.py
new file mode 100644
index 0000000000..3675fd0eff
--- /dev/null
+++ b/var/spack/repos/builtin/packages/scrnsaverproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Scrnsaverproto(Package):
+ """MIT Screen Saver Extension.
+
+ This extension defines a protocol to control screensaver features
+ and also to query screensaver info on specific windows."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/scrnsaverproto"
+ url = "https://www.x.org/archive/individual/proto/scrnsaverproto-1.2.2.tar.gz"
+
+ version('1.2.2', '21704f1bad472d94abd22fea5704bb48')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/sdl2-image/package.py b/var/spack/repos/builtin/packages/sdl2-image/package.py
new file mode 100644
index 0000000000..5df207ac55
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sdl2-image/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Sdl2Image(Package):
+ """SDL is designed to provide the bare bones of creating a graphical
+ program. """
+
+ homepage = "http://sdl.beuc.net/sdl.wiki/SDL_image"
+ url = "https://www.libsdl.org/projects/SDL_image/release/SDL2_image-2.0.1.tar.gz"
+
+ version('2.0.1', 'd94b94555ba022fa249a53a021dc3606')
+
+ depends_on('sdl2')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/sdl2/package.py b/var/spack/repos/builtin/packages/sdl2/package.py
new file mode 100644
index 0000000000..98f8861fed
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sdl2/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Sdl2(Package):
+ """Simple DirectMedia Layer is a cross-platform development library designed
+ to provide low level access to audio, keyboard, mouse, joystick, and
+ graphics hardware via OpenGL and Direct3D."""
+
+ homepage = "https://wiki.libsdl.org/FrontPage"
+ url = "https://libsdl.org/release/SDL2-2.0.5.tar.gz"
+
+ version('2.0.5', 'd4055424d556b4a908aa76fad63abd3c')
+
+ depends_on('cmake', type='build')
+
+ def install(self, spec, prefix):
+ with working_dir('spack-build', create=True):
+ cmake('..', *std_cmake_args)
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/seqtk/package.py b/var/spack/repos/builtin/packages/seqtk/package.py
new file mode 100644
index 0000000000..ca168c176c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/seqtk/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Seqtk(Package):
+ """Toolkit for processing sequences in FASTA/Q formats."""
+
+ homepage = "https://github.com/lh3/seqtk"
+ url = "https://github.com/lh3/seqtk/archive/v1.1.tar.gz"
+
+ version('1.2', '255ffe05bf2f073dc57abcff97f11a37')
+ version('1.1', 'ebf5cc57698a217150c2250494e039a2')
+
+ depends_on('zlib')
+
+ def install(self, spec, prefix):
+ make()
+ mkdirp(prefix.bin)
+ install('seqtk', prefix.bin)
+ set_executable(join_path(prefix.bin, 'seqtk'))
diff --git a/var/spack/repos/builtin/packages/serf/package.py b/var/spack/repos/builtin/packages/serf/package.py
index 3b1d08889c..ebca74a3ab 100644
--- a/var/spack/repos/builtin/packages/serf/package.py
+++ b/var/spack/repos/builtin/packages/serf/package.py
@@ -24,28 +24,32 @@
##############################################################################
from spack import *
+
class Serf(Package):
- """Apache Serf - a high performance C-based HTTP client library built upon the Apache Portable Runtime (APR) library"""
+ """Apache Serf - a high performance C-based HTTP client library
+ built upon the Apache Portable Runtime (APR) library"""
+
homepage = 'https://serf.apache.org/'
url = 'https://archive.apache.org/dist/serf/serf-1.3.8.tar.bz2'
- version('1.3.8', '1d45425ca324336ce2f4ae7d7b4cfbc5567c5446')
+ version('1.3.8', '1d45425ca324336ce2f4ae7d7b4cfbc5567c5446')
depends_on('apr')
depends_on('apr-util')
- depends_on('scons')
+ depends_on('scons', type='build')
depends_on('expat')
depends_on('openssl')
+ depends_on('zlib')
def install(self, spec, prefix):
- scons = which("scons")
-
options = ['PREFIX=%s' % prefix]
options.append('APR=%s' % spec['apr'].prefix)
options.append('APU=%s' % spec['apr-util'].prefix)
options.append('OPENSSL=%s' % spec['openssl'].prefix)
- options.append('LINKFLAGS=-L%s/lib' % spec['expat'].prefix)
- options.append('CPPFLAGS=-I%s/include' % spec['expat'].prefix)
+ options.append('LINKFLAGS=-L%s/lib -L%s/lib' %
+ (spec['expat'].prefix, spec['zlib'].prefix))
+ options.append('CPPFLAGS=-I%s/include -I%s/include' %
+ (spec['expat'].prefix, spec['zlib'].prefix))
scons(*options)
scons('install')
diff --git a/var/spack/repos/builtin/packages/sessreg/package.py b/var/spack/repos/builtin/packages/sessreg/package.py
new file mode 100644
index 0000000000..2ab505cc7a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sessreg/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Sessreg(Package):
+ """Sessreg is a simple program for managing utmp/wtmp entries for X
+ sessions. It was originally written for use with xdm, but may also be
+ used with other display managers such as gdm or kdm."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/sessreg"
+ url = "https://www.x.org/archive/individual/app/sessreg-1.1.0.tar.gz"
+
+ version('1.1.0', '5d7eb499043c7fdd8d53c5ba43660312')
+
+ depends_on('xproto@7.0.25:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def patch(self):
+ kwargs = {'string': True}
+ filter_file('$(CPP) $(DEFS)', '$(CPP) -P $(DEFS)',
+ 'man/Makefile.in', **kwargs)
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/setxkbmap/package.py b/var/spack/repos/builtin/packages/setxkbmap/package.py
new file mode 100644
index 0000000000..db365530c9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/setxkbmap/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Setxkbmap(Package):
+ """setxkbmap is an X11 client to change the keymaps in the X server for a
+ specified keyboard to use the layout determined by the options listed
+ on the command line."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/setxkbmap"
+ url = "https://www.x.org/archive/individual/app/setxkbmap-1.3.1.tar.gz"
+
+ version('1.3.1', 'fdfc0fc643a50fb0b5fa7546e4d28868')
+
+ depends_on('libxkbfile')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/showfont/package.py b/var/spack/repos/builtin/packages/showfont/package.py
new file mode 100644
index 0000000000..232988193b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/showfont/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Showfont(Package):
+ """showfont displays data about a font from an X font server.
+ The information shown includes font information, font properties,
+ character metrics, and character bitmaps."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/showfont"
+ url = "https://www.x.org/archive/individual/app/showfont-1.0.5.tar.gz"
+
+ version('1.0.5', 'cea973363df01fb27a87e939600137fd')
+
+ depends_on('libfs')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/silo/package.py b/var/spack/repos/builtin/packages/silo/package.py
index 017a09977a..691d53a9f1 100644
--- a/var/spack/repos/builtin/packages/silo/package.py
+++ b/var/spack/repos/builtin/packages/silo/package.py
@@ -38,9 +38,11 @@ class Silo(Package):
variant('fortran', default=True, description='Enable Fortran support')
variant('shared', default=True, description='Build shared libraries')
- variant('silex', default=False, description='Builds Silex, a GUI for viewing Silo files')
+ variant('silex', default=False,
+ description='Builds Silex, a GUI for viewing Silo files')
- depends_on('hdf5')
+ # silo uses the obsolete function H5Pset_fapl_mpiposix:
+ depends_on("hdf5 @:1.8.12")
depends_on('qt', when='+silex')
def install(self, spec, prefix):
@@ -55,8 +57,10 @@ class Silo(Package):
configure(
'--prefix=%s' % prefix,
- '--with-hdf5=%s,%s' % (spec['hdf5'].prefix.include, spec['hdf5'].prefix.lib),
- '--with-zlib=%s,%s' % (spec['zlib'].prefix.include, spec['zlib'].prefix.lib),
+ '--with-hdf5=%s,%s' % (spec['hdf5'].prefix.include,
+ spec['hdf5'].prefix.lib),
+ '--with-zlib=%s,%s' % (spec['zlib'].prefix.include,
+ spec['zlib'].prefix.lib),
'--enable-install-lite-headers',
*config_args)
diff --git a/var/spack/repos/builtin/packages/slepc/install_name_371.patch b/var/spack/repos/builtin/packages/slepc/install_name_371.patch
new file mode 100644
index 0000000000..d02ca88657
--- /dev/null
+++ b/var/spack/repos/builtin/packages/slepc/install_name_371.patch
@@ -0,0 +1,32 @@
+From 7489a3f3d569e2fbf5513ac9dcd769017d9f7eb7 Mon Sep 17 00:00:00 2001
+From: Lisandro Dalcin <dalcinl@gmail.com>
+Date: Thu, 2 Jun 2016 21:57:38 +0300
+Subject: [PATCH] OS X: Fix library path in invocation of install_name_tool
+
+---
+ config/install.py | 3 ++-
+ 1 file changed, 2 insertions(+), 1 deletion(-)
+
+diff --git a/config/install.py b/config/install.py
+index 09acd03..6ce98ae 100755
+--- a/config/install.py
++++ b/config/install.py
+@@ -25,6 +25,7 @@ class Installer:
+
+ def setupDirectories(self):
+ self.installDir = self.destDir
++ self.archDir = os.path.join(self.rootDir, self.arch)
+ self.rootIncludeDir = os.path.join(self.rootDir, 'include')
+ self.archIncludeDir = os.path.join(self.rootDir, self.arch, 'include')
+ self.rootConfDir = os.path.join(self.rootDir, 'lib','slepc','conf')
+@@ -220,7 +221,7 @@ for dir in dirs:
+ if os.path.splitext(dst)[1] == '.dylib' and os.path.isfile('/usr/bin/install_name_tool'):
+ (result, output) = commands.getstatusoutput('otool -D '+src)
+ oldname = output[output.find("\n")+1:]
+- installName = oldname.replace(self.destDir, self.installDir)
++ installName = oldname.replace(self.archDir, self.installDir)
+ (result, output) = commands.getstatusoutput('/usr/bin/install_name_tool -id ' + installName + ' ' + dst)
+ # preserve the original timestamps - so that the .a vs .so time order is preserved
+ shutil.copystat(src,dst)
+--
+2.7.4.1.g5468f9e
diff --git a/var/spack/repos/builtin/packages/slepc/package.py b/var/spack/repos/builtin/packages/slepc/package.py
index c148a579ec..17c512119c 100644
--- a/var/spack/repos/builtin/packages/slepc/package.py
+++ b/var/spack/repos/builtin/packages/slepc/package.py
@@ -28,23 +28,33 @@ from spack import *
class Slepc(Package):
"""
- Scalable Library for Eigenvalue Computations.
+ Scalable Library for Eigenvalue Problem Computations.
"""
homepage = "http://www.grycap.upv.es/slepc"
url = "http://slepc.upv.es/download/download.php?filename=slepc-3.6.2.tar.gz"
+ version('3.7.3', '3ef9bcc645a10c1779d56b3500472ceb66df692e389d635087d30e7c46424df9')
+ version('3.7.1', '670216f263e3074b21e0623c01bc0f562fdc0bffcd7bd42dd5d8edbe73a532c2')
+ version('3.6.3', '384939d009546db37bc05ed81260c8b5ba451093bf891391d32eb7109ccff876')
version('3.6.2', '2ab4311bed26ccf7771818665991b2ea3a9b15f97e29fd13911ab1293e8e65df')
- variant('arpack', default=False, description='Enables Arpack wrappers')
+ variant('arpack', default=True, description='Enables Arpack wrappers')
- depends_on('petsc')
- depends_on('arpack-ng~mpi',when='+arpack^petsc~mpi')
- depends_on('arpack-ng+mpi',when='+arpack^petsc+mpi')
+ # NOTE: make sure PETSc and SLEPc use the same python.
+ depends_on('python@2.6:2.7', type='build')
+ depends_on('petsc@3.7:', when='@3.7.1:')
+ depends_on('petsc@3.6.3:3.6.4', when='@3.6.2:3.6.3')
+ depends_on('arpack-ng~mpi', when='+arpack^petsc~mpi')
+ depends_on('arpack-ng+mpi', when='+arpack^petsc+mpi')
+
+ patch('install_name_371.patch', when='@3.7.1')
def install(self, spec, prefix):
# set SLEPC_DIR for installation
- os.environ['SLEPC_DIR'] = self.stage.source_path
+ # Note that one should set the current (temporary) directory instead
+ # of its symlink in spack/stage/!
+ os.environ['SLEPC_DIR'] = os.getcwd()
options = []
@@ -64,9 +74,10 @@ class Slepc(Package):
configure('--prefix=%s' % prefix, *options)
make('MAKE_NP=%s' % make_jobs, parallel=False)
- #FIXME:
- # make('test')
- make('install')
+ if self.run_tests:
+ make('test', parallel=False)
+
+ make('install', parallel=False)
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
# set up SLEPC_DIR for everyone using SLEPc package
diff --git a/var/spack/repos/builtin/packages/smproxy/package.py b/var/spack/repos/builtin/packages/smproxy/package.py
new file mode 100644
index 0000000000..5bdde800d4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/smproxy/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Smproxy(Package):
+ """smproxy allows X applications that do not support X11R6 session
+ management to participate in an X11R6 session."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/smproxy"
+ url = "https://www.x.org/archive/individual/app/smproxy-1.0.6.tar.gz"
+
+ version('1.0.6', '012c259f5a89e5c636037446d44eb354')
+
+ depends_on('libsm')
+ depends_on('libice')
+ depends_on('libxt')
+ depends_on('libxmu')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/snappy/package.py b/var/spack/repos/builtin/packages/snappy/package.py
index 836063f933..1e94980c92 100644
--- a/var/spack/repos/builtin/packages/snappy/package.py
+++ b/var/spack/repos/builtin/packages/snappy/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import os
from spack import *
+
class Snappy(Package):
"""A fast compressor/decompressor: https://code.google.com/p/snappy"""
diff --git a/var/spack/repos/builtin/packages/sowing/package.py b/var/spack/repos/builtin/packages/sowing/package.py
new file mode 100644
index 0000000000..f7f6297488
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sowing/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Sowing(Package):
+ """Sowing generates Fortran interfaces and documentation for PETSc
+ and MPICH.
+ """
+
+ homepage = "http://www.mcs.anl.gov/petsc/index.html"
+ url = "http://ftp.mcs.anl.gov/pub/petsc/externalpackages/sowing-1.1.23-p1.tar.gz"
+
+ version('1.1.23-p1', '65aaf3ae2a4c0f30d532fec291702e16')
+
+ def install(self, spec, prefix):
+ configure('--prefix=%s' % prefix)
+ make('ALL', parallel=False)
+ make("install")
diff --git a/var/spack/repos/builtin/packages/spark/package.py b/var/spack/repos/builtin/packages/spark/package.py
new file mode 100644
index 0000000000..84b63fa87f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/spark/package.py
@@ -0,0 +1,74 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+import shutil
+
+
+class Spark(Package):
+ """Apache Spark is a fast and general engine
+ for large-scale data processing.
+ """
+
+ homepage = "http://spark.apache.org"
+ url = "http://mirrors.ocf.berkeley.edu/apache/spark/spark-2.0.0/spark-2.0.0-bin-without-hadoop.tgz"
+
+ variant('hadoop', default=False,
+ description='Build with Hadoop')
+
+ depends_on('jdk', type=('build', 'run'))
+ depends_on('hadoop', when='+hadoop', type=('build', 'run'))
+
+ version('2.0.0', '8a5307d973da6949a385aefb6ff747bb')
+ version('1.6.2', '304394fbe2899211217f0cd9e9b2b5d9')
+ version('1.6.1', 'fcf4961649f15af1fea78c882e65b001')
+
+ def install(self, spec, prefix):
+
+ def install_dir(dirname):
+ install_tree(dirname, join_path(prefix, dirname))
+
+ install_dir('bin')
+ install_dir('conf')
+ install_dir('jars')
+ install_dir('python')
+ install_dir('R')
+ install_dir('sbin')
+ install_dir('yarn')
+
+ # required for spark to recognize binary distribution
+ shutil.copy('RELEASE', prefix)
+
+ @when('+hadoop')
+ def setup_environment(self, spack_env, run_env):
+
+ env['JAVA_HOME'] = self.spec['jdk'].prefix
+ # spack_env.set('JAVA_HOME', self.spec['jdk'].prefix)
+
+ hadoop_bin_path = join_path(self.spec['hadoop'].prefix.bin, 'hadoop')
+ hadoop_bin = Executable(hadoop_bin_path)
+ hadoop_classpath = hadoop_bin('classpath', return_output=True)
+
+ run_env.set('SPARK_DIST_CLASSPATH', hadoop_classpath)
diff --git a/var/spack/repos/builtin/packages/sparsehash/package.py b/var/spack/repos/builtin/packages/sparsehash/package.py
index a72a5ce105..e5abd42ae6 100644
--- a/var/spack/repos/builtin/packages/sparsehash/package.py
+++ b/var/spack/repos/builtin/packages/sparsehash/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Sparsehash(Package):
"""Sparse and dense hash-tables for C++ by Google"""
homepage = "https://github.com/sparsehash/sparsehash"
diff --git a/var/spack/repos/builtin/packages/spdlog/package.py b/var/spack/repos/builtin/packages/spdlog/package.py
new file mode 100644
index 0000000000..f9520219a3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/spdlog/package.py
@@ -0,0 +1,36 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Spdlog(CMakePackage):
+ """Very fast, header only, C++ logging library"""
+
+ homepage = "https://github.com/gabime/spdlog"
+ url = "https://github.com/gabime/spdlog/archive/v0.9.0.tar.gz"
+
+ version('0.11.0', '08232203f18a6f9ff47e083cc7a141a050805d3b')
+ version('0.10.0', '57b471ef97a23cc29c38b62e00e89a411a87ea7f')
+ version('0.9.0', 'dda741ef8e12d57d91f778d85e95a27d84a82ac4')
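Because Spdlog subclasses CMakePackage, no install() method is needed here: the base class supplies default cmake, build, and install phases for this header-only library. If extra CMake flags were ever required, the usual hook would presumably be cmake_args(), roughly as in this sketch (the flag shown is illustrative only and is not part of this commit):

    # minimal sketch, not part of the diff above
    def cmake_args(self):
        # flag name is illustrative; check spdlog's CMakeLists.txt
        return ['-DSPDLOG_BUILD_EXAMPLES=OFF']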
diff --git a/var/spack/repos/builtin/packages/spindle/package.py b/var/spack/repos/builtin/packages/spindle/package.py
index bcdc7543a3..213d41e970 100644
--- a/var/spack/repos/builtin/packages/spindle/package.py
+++ b/var/spack/repos/builtin/packages/spindle/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Spindle(Package):
"""Spindle improves the library-loading performance of dynamically
linked HPC applications. Without Spindle large MPI jobs can
diff --git a/var/spack/repos/builtin/packages/spot/package.py b/var/spack/repos/builtin/packages/spot/package.py
index 59535dcc4c..096aa24c02 100644
--- a/var/spack/repos/builtin/packages/spot/package.py
+++ b/var/spack/repos/builtin/packages/spot/package.py
@@ -23,16 +23,17 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+
class Spot(Package):
- """Spot is a C++11 library for omega-automata manipulation and model checking."""
+ """Spot is a C++11 library for omega-automata manipulation and model
+ checking."""
homepage = "https://spot.lrde.epita.fr/index.html"
url = "http://www.lrde.epita.fr/dload/spot/spot-1.99.3.tar.gz"
version('1.99.3', 'd53adcb2d0fe7c69f45d4e595a58254e')
- #depends_on("gcc@4.8:")
+ # depends_on("gcc@4.8:", type='build')
depends_on("python@3.2:")
def install(self, spec, prefix):
diff --git a/var/spack/repos/builtin/packages/sqlite/package.py b/var/spack/repos/builtin/packages/sqlite/package.py
index 5e7ae4fb8b..c57ee72323 100644
--- a/var/spack/repos/builtin/packages/sqlite/package.py
+++ b/var/spack/repos/builtin/packages/sqlite/package.py
@@ -23,6 +23,8 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+from spack import architecture
+
class Sqlite(Package):
"""SQLite3 is an SQL database engine in a C library. Programs that
@@ -32,9 +34,17 @@ class Sqlite(Package):
homepage = "www.sqlite.org"
version('3.8.5', '0544ef6d7afd8ca797935ccc2685a9ed',
- url='http://www.sqlite.org/2014/sqlite-autoconf-3080500.tar.gz')
+ url='https://www.sqlite.org/2014/sqlite-autoconf-3080500.tar.gz')
+
+ def get_arch(self):
+ arch = architecture.Arch()
+ arch.platform = architecture.platform()
+ return str(arch.platform.target('default_target'))
def install(self, spec, prefix):
- configure("--prefix=" + prefix)
+ config = ["--prefix=" + prefix]
+ if self.get_arch() == 'ppc64le':
+ config.append("--build=powerpc64le-redhat-linux-gnu")
+ configure(*config)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/star-ccm-plus/package.py b/var/spack/repos/builtin/packages/star-ccm-plus/package.py
new file mode 100644
index 0000000000..ba1516b62a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/star-ccm-plus/package.py
@@ -0,0 +1,78 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import glob
+import os
+
+
+class StarCcmPlus(Package):
+ """STAR-CCM+ (Computational Continuum Mechanics) CFD solver."""
+
+ homepage = "http://mdx.plm.automation.siemens.com/star-ccm-plus"
+
+ version('11.06.010_02', 'd349c6ac8293d8e6e7a53533d695588f')
+
+ variant('docs', default=False, description='Install the documentation')
+
+ # Licensing
+ license_required = True
+ license_vars = ['CDLMD_LICENSE_FILE', 'LM_LICENSE_FILE']
+
+ def url_for_version(self, version):
+ return "file://{0}/STAR-CCM+{1}_linux-x86_64.tar.gz".format(
+ os.getcwd(), version)
+
+ def install(self, spec, prefix):
+ # There is a known issue with the LaunchAnywhere application.
+ # Specifically, it cannot handle long prompts or prompts
+ # containing special characters and backslashes. It results in
+ # the following error message:
+ #
+ # An internal LaunchAnywhere application error has occured and this
+ # application cannot proceed. (LAX)
+ #
+ # Stack Trace:
+ # java.lang.IllegalArgumentException: Malformed \uxxxx encoding.
+ # at java.util.Properties.loadConvert(Unknown Source)
+ # at java.util.Properties.load0(Unknown Source)
+ # at java.util.Properties.load(Unknown Source)
+ # at com.zerog.common.java.util.PropertiesUtil.loadProperties(
+ # Unknown Source)
+ # at com.zerog.lax.LAX.<init>(Unknown Source)
+ # at com.zerog.lax.LAX.main(Unknown Source)
+ #
+ # https://www.maplesoft.com/support/faqs/detail.aspx?sid=35272
+ env['PS1'] = '>'
+ env['PROMPT_COMMAND'] = ''
+
+ installer = Executable(glob.glob('*.bin')[0])
+
+ installer(
+ '-i', 'silent',
+ '-DINSTALLDIR={0}'.format(prefix),
+ '-DINSTALLFLEX=false',
+ '-DADDSYSTEMPATH=false',
+ '-DNODOC={0}'.format('false' if '+docs' in spec else 'true')
+ )
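STAR-CCM+ cannot be downloaded automatically, so url_for_version() above points Spack at a tarball the user has already placed in the directory Spack is run from. A minimal sketch of the URL it produces (the path is hypothetical):

    import os
    version = '11.06.010_02'
    url = "file://{0}/STAR-CCM+{1}_linux-x86_64.tar.gz".format(
        os.getcwd(), version)
    # e.g. file:///home/user/STAR-CCM+11.06.010_02_linux-x86_64.tar.gz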
diff --git a/var/spack/repos/builtin/packages/stat/package.py b/var/spack/repos/builtin/packages/stat/package.py
index 80d27e149f..c511fcee70 100644
--- a/var/spack/repos/builtin/packages/stat/package.py
+++ b/var/spack/repos/builtin/packages/stat/package.py
@@ -24,24 +24,39 @@
##############################################################################
from spack import *
+
class Stat(Package):
"""Library to create, manipulate, and export graphs Graphlib."""
+
homepage = "http://paradyn.org/STAT/STAT.html"
url = "https://github.com/lee218llnl/stat/archive/v2.0.0.tar.gz"
version('2.2.0', '26bd69dd57a15afdd5d0ebdb0b7fb6fc')
version('2.1.0', 'ece26beaf057aa9134d62adcdda1ba91')
version('2.0.0', 'c7494210b0ba26b577171b92838e1a9b')
+ version('3.0.0', 'a97cb235c266371c4a26329112de48a2',
+ url='https://github.com/LLNL/STAT/releases/download/v3.0.0/STAT-3.0.0.tar.gz')
+ # TODO: dysect requires Dyninst patch for version 3.0.0b
variant('dysect', default=False, description="enable DySectAPI")
+ variant('examples', default=False, description="enable examples")
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
depends_on('libelf')
depends_on('libdwarf')
- depends_on('dyninst')
- depends_on('graphlib')
- depends_on('graphviz')
+ depends_on('dyninst', when='~dysect')
+ depends_on('dyninst@8.2.1+stat_dysect', when='+dysect')
+ depends_on('graphlib@2.0.0', when='@2.0.0:2.2.0')
+ depends_on('graphlib@3.0.0', when='@3:')
+ depends_on('graphviz', type=('build', 'link', 'run'))
depends_on('launchmon')
depends_on('mrnet')
+ depends_on('python')
+ depends_on('py-pygtk')
+ depends_on('swig')
+ depends_on('mpi', when='+examples')
patch('configure_mpicxx.patch', when='@2.1.0')
@@ -49,16 +64,16 @@ class Stat(Package):
configure_args = [
"--enable-gui",
"--prefix=%s" % prefix,
- "--disable-examples", # Examples require MPI: avoid this dependency.
"--with-launchmon=%s" % spec['launchmon'].prefix,
"--with-mrnet=%s" % spec['mrnet'].prefix,
"--with-graphlib=%s" % spec['graphlib'].prefix,
"--with-stackwalker=%s" % spec['dyninst'].prefix,
"--with-libdwarf=%s" % spec['libdwarf'].prefix
- ]
+ ]
if '+dysect' in spec:
configure_args.append('--enable-dysectapi')
+ if '~examples' in spec:
+ configure_args.append('--disable-examples')
configure(*configure_args)
- make(parallel=False)
make("install")
diff --git a/var/spack/repos/builtin/packages/stream/package.py b/var/spack/repos/builtin/packages/stream/package.py
new file mode 100644
index 0000000000..8b3f32af8a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/stream/package.py
@@ -0,0 +1,62 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Stream(Package):
+ """The STREAM benchmark is a simple synthetic benchmark program that
+ measures sustainable memory bandwidth (in MB/s) and the corresponding
+ computation rate for simple vector kernels."""
+
+ homepage = "https://www.cs.virginia.edu/stream/ref.html"
+
+ version('5.10', git='https://github.com/jeffhammond/STREAM.git')
+
+ variant('openmp', default=False, description='Build with OpenMP support')
+
+ def patch(self):
+ makefile = FileFilter('Makefile')
+
+ # Use the Spack compiler wrappers
+ makefile.filter('CC = .*', 'CC = cc')
+ makefile.filter('FC = .*', 'FC = f77')
+
+ cflags = '-O2'
+ fflags = '-O2'
+ if '+openmp' in self.spec:
+ cflags += ' ' + self.compiler.openmp_flag
+ fflags += ' ' + self.compiler.openmp_flag
+
+ # Set the appropriate flags for this compiler
+ makefile.filter('CFLAGS = .*', 'CFLAGS = {0}'.format(cflags))
+ makefile.filter('FFLAGS = .*', 'FFLAGS = {0}'.format(fflags))
+
+ def install(self, spec, prefix):
+ make()
+
+ # Manual installation
+ mkdir(prefix.bin)
+ install('stream_c.exe', prefix.bin)
+ install('stream_f.exe', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/sublime-text/package.py b/var/spack/repos/builtin/packages/sublime-text/package.py
new file mode 100644
index 0000000000..81d8690db8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sublime-text/package.py
@@ -0,0 +1,59 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+from distutils.dir_util import copy_tree
+
+
+class SublimeText(Package):
+ """Sublime Text is a sophisticated text editor for code, markup and
+ prose."""
+
+ homepage = "http://www.sublimetext.com/"
+ url = "https://download.sublimetext.com/sublime_text_3_build_3126_x64.tar.bz2"
+
+ version('3126', 'acc34252b0ea7dff1f581c5db1564dcb')
+ version('2.0.2', '699cd26d7fe0bada29eb1b2cd7b50e4b')
+
+ # Sublime text comes as a pre-compiled binary.
+ # Since we can't link to Spack packages, we'll just have to
+ # add them as runtime dependencies.
+
+ # depends_on('libgobject', type='run')
+ depends_on('glib', type='run')
+ depends_on('libx11', type='run')
+ depends_on('pcre', type='run')
+ depends_on('libffi', type='run')
+ depends_on('libxcb', type='run')
+ depends_on('libxau', type='run')
+
+ def url_for_version(self, version):
+ if version.up_to(1) == '2':
+ return "https://download.sublimetext.com/Sublime%20Text%20{0}%20x64.tar.bz2".format(version)
+ else:
+ return "https://download.sublimetext.com/sublime_text_3_build_{0}_x64.tar.bz2".format(version)
+
+ def install(self, spec, prefix):
+ # Sublime text comes as a pre-compiled binary.
+ copy_tree('.', prefix)
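The version branch in url_for_version() above selects between the two download URL schemes used by the Sublime Text 2 and 3 release series. Substituting the versions declared in this package gives, for illustration:

    # 2.0.2 -> https://download.sublimetext.com/Sublime%20Text%202.0.2%20x64.tar.bz2
    # 3126  -> https://download.sublimetext.com/sublime_text_3_build_3126_x64.tar.bz2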
diff --git a/var/spack/repos/builtin/packages/subversion/package.py b/var/spack/repos/builtin/packages/subversion/package.py
index 68ee397857..02b7d96378 100644
--- a/var/spack/repos/builtin/packages/subversion/package.py
+++ b/var/spack/repos/builtin/packages/subversion/package.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-#import os
+
class Subversion(Package):
"""Apache Subversion - an open source version control system."""
@@ -41,39 +41,43 @@ class Subversion(Package):
# Optional: We need swig if we want the Perl, Python or Ruby
# bindings.
- #depends_on('swig')
- #depends_on('python')
- #depends_on('perl')
- #depends_on('ruby')
+ # depends_on('swig')
+ # depends_on('python')
+ # depends_on('perl')
+ # depends_on('ruby')
+
+ # Installation has race cases.
+ parallel = False
def install(self, spec, prefix):
# configure, build, install:
- # Ref: http://www.linuxfromscratch.org/blfs/view/svn/general/subversion.html
+ # Ref:
+ # http://www.linuxfromscratch.org/blfs/view/svn/general/subversion.html
options = ['--prefix=%s' % prefix]
options.append('--with-apr=%s' % spec['apr'].prefix)
options.append('--with-apr-util=%s' % spec['apr-util'].prefix)
options.append('--with-zlib=%s' % spec['zlib'].prefix)
options.append('--with-sqlite=%s' % spec['sqlite'].prefix)
options.append('--with-serf=%s' % spec['serf'].prefix)
- #options.append('--with-swig=%s' % spec['swig'].prefix)
+ # options.append('--with-swig=%s' % spec['swig'].prefix)
configure(*options)
make()
make('install')
# python bindings
- #make('swig-py',
+ # make('swig-py',
# 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn',
# 'swig_pydir_extra=/usr/lib/python2.7/site-packages/svn')
- #make('install-swig-py',
+ # make('install-swig-py',
# 'swig-pydir=/usr/lib/python2.7/site-packages/libsvn',
# 'swig_pydir_extra=/usr/lib/python2.7/site-packages/svn')
# perl bindings
- #make('swig-pl')
- #make('install-swig-pl')
+ # make('swig-pl')
+ # make('install-swig-pl')
# ruby bindings
- #make('swig-rb')
- #make('isntall-swig-rb')
+ # make('swig-rb')
+ # make('install-swig-rb')
diff --git a/var/spack/repos/builtin/packages/suite-sparse/package.py b/var/spack/repos/builtin/packages/suite-sparse/package.py
index dd0dfa3e23..aa22e1ecce 100644
--- a/var/spack/repos/builtin/packages/suite-sparse/package.py
+++ b/var/spack/repos/builtin/packages/suite-sparse/package.py
@@ -32,56 +32,79 @@ class SuiteSparse(Package):
homepage = 'http://faculty.cse.tamu.edu/davis/suitesparse.html'
url = 'http://faculty.cse.tamu.edu/davis/SuiteSparse/SuiteSparse-4.5.1.tar.gz'
+ version('4.5.3', '8ec57324585df3c6483ad7f556afccbd')
version('4.5.1', 'f0ea9aad8d2d1ffec66a5b6bfeff5319')
- # FIXME: (see below)
- # variant('tbb', default=True, description='Build with Intel TBB')
+ variant('tbb', default=True, description='Build with Intel TBB')
+ variant('fpic', default=True, description='Build position independent code (required to link with shared libraries)')
depends_on('blas')
depends_on('lapack')
- depends_on('metis@5.1.0', when='@4.5.1')
- # FIXME:
- # in @4.5.1. TBB support in SPQR seems to be broken as TBB-related linkng flags
- # does not seem to be used, which leads to linking errors on Linux.
- # Try re-enabling in future versions.
- # depends_on('tbb', when='+tbb')
+ depends_on('metis@5.1.0', when='@4.5.1:')
+ # In @4.5.1, TBB support in SPQR seems to be broken, as the TBB-related
+ # linking flags do not seem to be used, which leads to linking errors
+ # on Linux.
+ depends_on('tbb', when='@4.5.3:+tbb')
+
+ patch('tbb_453.patch', when='@4.5.3')
def install(self, spec, prefix):
- # The build system of SuiteSparse is quite old-fashioned
- # It's basically a plain Makefile which include an header (SuiteSparse_config/SuiteSparse_config.mk)
- # with a lot of convoluted logic in it.
- # Any kind of customization will need to go through filtering of that file
+ # The build system of SuiteSparse is quite old-fashioned.
+ # It's basically a plain Makefile which includes a header
+ # (SuiteSparse_config/SuiteSparse_config.mk) with a lot of convoluted
+ # logic in it. Any kind of customization will need to go through
+ # filtering of that file.
make_args = ['INSTALL=%s' % prefix]
# inject Spack compiler wrappers
make_args.extend([
- 'AUTOCC=no',
- 'CC=cc',
- 'CXX=c++',
- 'F77=f77',
+ 'AUTOCC=no',
+ 'CC=cc',
+ 'CXX=c++',
+ 'F77=f77',
+ 'CUDA_ROOT =',
+ 'GPU_BLAS_PATH =',
+ 'GPU_CONFIG =',
+ 'CUDA_PATH =',
+ 'CUDART_LIB =',
+ 'CUBLAS_LIB =',
+ 'CUDA_INC_PATH =',
+ 'NV20 =',
+ 'NV30 =',
+ 'NV35 =',
+ 'NVCC = echo',
+ 'NVCCFLAGS =',
])
+ if '+fpic' in spec:
+ make_args.extend(['CFLAGS=-fPIC', 'FFLAGS=-fPIC'])
# use Spack's metis in CHOLMOD/Partition module,
# otherwise internal Metis will be compiled
make_args.extend([
- 'MY_METIS_LIB=-L%s -lmetis' % spec['metis'].prefix.lib,
- 'MY_METIS_INC=%s' % spec['metis'].prefix.include,
+ 'MY_METIS_LIB=-L%s -lmetis' % spec['metis'].prefix.lib,
+ 'MY_METIS_INC=%s' % spec['metis'].prefix.include,
])
# Intel TBB in SuiteSparseQR
- if '+tbb' in spec:
+ if 'tbb' in spec:
make_args.extend([
'SPQR_CONFIG=-DHAVE_TBB',
'TBB=-L%s -ltbb' % spec['tbb'].prefix.lib,
])
- # BLAS arguments require path to libraries
- # FIXME : (blas / lapack always provide libblas and liblapack as aliases)
+ # Make sure Spack's Blas/Lapack is used. Otherwise System's
+ # Blas/Lapack might be picked up.
+ blas = spec['blas'].blas_libs.ld_flags
+ lapack = spec['lapack'].lapack_libs.ld_flags
+ if '@4.5.1' in spec:
+ # adding -lstdc++ is clearly an ugly way to do this, but it follows
+ # the TCOV path of SuiteSparse 4.5.1's SuiteSparse_config.mk
+ blas += ' -lstdc++'
+
make_args.extend([
- 'BLAS=-lblas',
- 'LAPACK=-llapack'
+ 'BLAS=%s' % blas,
+ 'LAPACK=%s' % lapack
])
make('install', *make_args)
diff --git a/var/spack/repos/builtin/packages/suite-sparse/tbb_453.patch b/var/spack/repos/builtin/packages/suite-sparse/tbb_453.patch
new file mode 100644
index 0000000000..70241ed017
--- /dev/null
+++ b/var/spack/repos/builtin/packages/suite-sparse/tbb_453.patch
@@ -0,0 +1,13 @@
+diff --git a/SPQR/Lib/Makefile b/SPQR/Lib/Makefile
+index eaade58..d0de852 100644
+--- a/SPQR/Lib/Makefile
++++ b/SPQR/Lib/Makefile
+@@ -13,7 +13,7 @@ ccode: all
+ include ../../SuiteSparse_config/SuiteSparse_config.mk
+
+ # SPQR depends on CHOLMOD, AMD, COLAMD, LAPACK, the BLAS and SuiteSparse_config
+-LDLIBS += -lamd -lcolamd -lcholmod -lsuitesparseconfig $(LAPACK) $(BLAS)
++LDLIBS += -lamd -lcolamd -lcholmod -lsuitesparseconfig $(TBB) $(LAPACK) $(BLAS)
+
+ # compile and install in SuiteSparse/lib
+ library:
diff --git a/var/spack/repos/builtin/packages/sundials/package.py b/var/spack/repos/builtin/packages/sundials/package.py
index ba2340f74c..6ee247b7ea 100644
--- a/var/spack/repos/builtin/packages/sundials/package.py
+++ b/var/spack/repos/builtin/packages/sundials/package.py
@@ -23,17 +23,156 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
+import os
+
class Sundials(Package):
- """SUNDIALS (SUite of Nonlinear and DIfferential/ALgebraic equation Solvers)"""
+ """SUNDIALS (SUite of Nonlinear and DIfferential/ALgebraic equation
+ Solvers)"""
+
homepage = "http://computation.llnl.gov/casc/sundials/"
- url = "http://computation.llnl.gov/casc/sundials/download/code/sundials-2.5.0.tar.gz"
+ url = "http://computation.llnl.gov/projects/sundials-suite-nonlinear-differential-algebraic-equation-solvers/download/sundials-2.6.2.tar.gz"
+
+ version('2.6.2', '3deeb0ede9f514184c6bd83ecab77d95')
- version('2.5.0', 'aba8b56eec600de3109cfb967aa3ba0f')
+ variant('mpi', default=True, description='Enable MPI support')
+ variant('lapack', default=True,
+ description='Build with external BLAS/LAPACK libraries')
+ variant('klu', default=False,
+ description='Build with SuiteSparse KLU libraries')
+ variant('superlu', default=False,
+ description='Build with SuperLU_MT libraries')
+ variant('openmp', default=False, description='Enable OpenMP support')
+ variant('pthread', default=True,
+ description='Enable POSIX threads support')
- depends_on("mpi")
+ depends_on('cmake', type='build')
+ depends_on('mpi', when='+mpi')
+ depends_on('blas', when='+lapack')
+ depends_on('lapack', when='+lapack')
+ depends_on('suite-sparse', when='+klu')
+ depends_on('superlu-mt+openmp', when='+superlu+openmp')
+ depends_on('superlu-mt+pthread', when='+superlu+pthread')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
+ cmake_args = std_cmake_args[:]
+ cmake_args.extend([
+ '-DBUILD_SHARED_LIBS=ON',
+ '-DCMAKE_C_FLAGS=-fPIC',
+ '-DCMAKE_Fortran_FLAGS=-fPIC',
+ '-DEXAMPLES_ENABLE=ON',
+ '-DEXAMPLES_INSTALL=ON',
+ '-DFCMIX_ENABLE=ON'
+ ])
+
+ # MPI support
+ if '+mpi' in spec:
+ cmake_args.extend([
+ '-DMPI_ENABLE=ON',
+ '-DMPI_MPICC={0}'.format(spec['mpi'].mpicc),
+ '-DMPI_MPIF77={0}'.format(spec['mpi'].mpif77)
+ ])
+ else:
+ cmake_args.append('-DMPI_ENABLE=OFF')
+
+ # Building with LAPACK and BLAS
+ if '+lapack' in spec:
+ cmake_args.extend([
+ '-DLAPACK_ENABLE=ON',
+ '-DLAPACK_LIBRARIES={0}'.format(
+ (spec['lapack'].lapack_libs +
+ spec['blas'].blas_libs).joined(';')
+ )
+ ])
+ else:
+ cmake_args.append('-DLAPACK_ENABLE=OFF')
+
+ # Building with KLU
+ if '+klu' in spec:
+ cmake_args.extend([
+ '-DKLU_ENABLE=ON',
+ '-DKLU_INCLUDE_DIR={0}'.format(
+ spec['suite-sparse'].prefix.include),
+ '-DKLU_LIBRARY_DIR={0}'.format(
+ spec['suite-sparse'].prefix.lib)
+ ])
+ else:
+ cmake_args.append('-DKLU_ENABLE=OFF')
+
+ # Building with SuperLU_MT
+ if '+superlu' in spec:
+ cmake_args.extend([
+ '-DSUPERLUMT_ENABLE=ON',
+ '-DSUPERLUMT_INCLUDE_DIR={0}'.format(
+ spec['superlu-mt'].prefix.include),
+ '-DSUPERLUMT_LIBRARY_DIR={0}'.format(
+ spec['superlu-mt'].prefix.lib)
+ ])
+ if '+openmp' in spec:
+ cmake_args.append('-DSUPERLUMT_THREAD_TYPE=OpenMP')
+ elif '+pthread' in spec:
+ cmake_args.append('-DSUPERLUMT_THREAD_TYPE=Pthread')
+ else:
+ msg = 'You must choose either +openmp or +pthread when '
+ msg += 'building with SuperLU_MT'
+ raise RuntimeError(msg)
+ else:
+ cmake_args.append('-DSUPERLUMT_ENABLE=OFF')
+
+ # OpenMP support
+ if '+openmp' in spec:
+ cmake_args.append('-DOPENMP_ENABLE=ON')
+ else:
+ cmake_args.append('-DOPENMP_ENABLE=OFF')
+
+ # POSIX threads support
+ if '+pthread' in spec:
+ cmake_args.append('-DPTHREAD_ENABLE=ON')
+ else:
+ cmake_args.append('-DPTHREAD_ENABLE=OFF')
+
+ with working_dir('build', create=True):
+ cmake('..', *cmake_args)
+
+ make()
+ make('install')
+
+ install('LICENSE', prefix)
+
+ self.filter_compilers()
+
+ def filter_compilers(self):
+ """Run after install to tell the Makefiles to use
+ the compilers that Spack built the package with.
+
+ If this isn't done, they'll have CC, CPP, and F77 set to
+ Spack's generic cc and f77. We want them to be bound to
+ whatever compiler they were built with."""
+
+ kwargs = {'ignore_absent': True, 'backup': False, 'string': True}
+ dirname = os.path.join(self.prefix, 'examples')
+
+ cc_files = [
+ 'arkode/C_serial/Makefile', 'arkode/C_parallel/Makefile',
+ 'cvode/serial/Makefile', 'cvode/parallel/Makefile',
+ 'cvodes/serial/Makefile', 'cvodes/parallel/Makefile',
+ 'ida/serial/Makefile', 'ida/parallel/Makefile',
+ 'idas/serial/Makefile', 'idas/parallel/Makefile',
+ 'kinsol/serial/Makefile', 'kinsol/parallel/Makefile',
+ 'nvector/serial/Makefile', 'nvector/parallel/Makefile',
+ 'nvector/pthreads/Makefile'
+ ]
+
+ f77_files = [
+ 'arkode/F77_serial/Makefile', 'cvode/fcmix_serial/Makefile',
+ 'ida/fcmix_serial/Makefile', 'ida/fcmix_pthreads/Makefile',
+ 'kinsol/fcmix_serial/Makefile'
+ ]
+
+ for filename in cc_files:
+ filter_file(os.environ['CC'], self.compiler.cc,
+ os.path.join(dirname, filename), **kwargs)
+
+ for filename in f77_files:
+ filter_file(os.environ['F77'], self.compiler.f77,
+ os.path.join(dirname, filename), **kwargs)
diff --git a/var/spack/repos/builtin/packages/superlu-dist/package.py b/var/spack/repos/builtin/packages/superlu-dist/package.py
index 6c06b5497c..f076358e90 100644
--- a/var/spack/repos/builtin/packages/superlu-dist/package.py
+++ b/var/spack/repos/builtin/packages/superlu-dist/package.py
@@ -24,54 +24,67 @@
##############################################################################
from spack import *
import glob
+import os
+
class SuperluDist(Package):
- """A general purpose library for the direct solution of large, sparse, nonsymmetric systems of linear equations on high performance machines."""
+ """A general purpose library for the direct solution of large, sparse,
+ nonsymmetric systems of linear equations on high performance machines."""
homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/"
- url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz"
+ url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_dist_4.1.tar.gz"
+ version('develop', git='https://github.com/xiaoyeli/superlu_dist', tag='master')
+ version('5.1.1', '12638c631733a27dcbd87110e9f9cb1e')
+ version('5.1.0', '6bb86e630bd4bd8650243aed8fd92eb9')
version('5.0.0', '2b53baf1b0ddbd9fcf724992577f0670')
- # default to version 4.3 since petsc and trilinos are not tested with 5.0.
- version('4.3', 'ee66c84e37b4f7cc557771ccc3dc43ae', preferred=True)
+ version('4.3', 'ee66c84e37b4f7cc557771ccc3dc43ae')
version('4.2', 'ae9fafae161f775fbac6eba11e530a65')
version('4.1', '4edee38cc29f687bd0c8eb361096a455')
version('4.0', 'c0b98b611df227ae050bc1635c6940e0')
+ version('3.3', 'f4805659157d93a962500902c219046b')
+
+ variant('int64', default=False,
+ description="Use 64bit integers")
- depends_on ('mpi')
- depends_on ('blas')
- depends_on ('lapack')
- depends_on ('parmetis')
- depends_on ('metis@5:')
+ depends_on('mpi')
+ depends_on('blas')
+ depends_on('lapack')
+ depends_on('parmetis')
+ depends_on('metis@5:')
def install(self, spec, prefix):
+ lapack_blas = spec['lapack'].lapack_libs + spec['blas'].blas_libs
makefile_inc = []
makefile_inc.extend([
'PLAT = _mac_x',
- 'DSuperLUroot = %s' % self.stage.source_path, #self.stage.path, prefix
+ 'DSuperLUroot = %s' % self.stage.source_path,
'DSUPERLULIB = $(DSuperLUroot)/lib/libsuperlu_dist.a',
'BLASDEF = -DUSE_VENDOR_BLAS',
- 'BLASLIB = -L%s -llapack %s -lblas' % (spec['lapack'].prefix.lib, spec['blas'].prefix.lib), # FIXME: avoid hardcoding blas/lapack lib names
+ 'BLASLIB = %s' % lapack_blas.ld_flags,
'METISLIB = -L%s -lmetis' % spec['metis'].prefix.lib,
'PARMETISLIB = -L%s -lparmetis' % spec['parmetis'].prefix.lib,
'FLIBS =',
- 'LIBS = $(DSUPERLULIB) $(BLASLIB) $(PARMETISLIB) $(METISLIB)',
+ 'LIBS = $(DSUPERLULIB) $(BLASLIB) $(PARMETISLIB) $(METISLIB)', # noqa
'ARCH = ar',
'ARCHFLAGS = cr',
'RANLIB = true',
- 'CC = mpicc', # FIXME avoid hardcoding MPI compiler names
- 'CFLAGS = -fPIC -std=c99 -O2 -I%s -I%s' %(spec['parmetis'].prefix.include, spec['metis'].prefix.include),
+ 'CC = {0}'.format(self.spec['mpi'].mpicc),
+ 'CFLAGS = -fPIC -std=c99 -O2 -I%s -I%s %s' % (
+ spec['parmetis'].prefix.include,
+ spec['metis'].prefix.include,
+ '-D_LONGINT' if '+int64' in spec else ''),
'NOOPTS = -fPIC -std=c99',
- 'FORTRAN = mpif77',
+ 'FORTRAN = {0}'.format(self.spec['mpi'].mpif77),
'F90FLAGS = -O2',
- 'LOADER = mpif77',
+ 'LOADER = {0}'.format(self.spec['mpi'].mpif77),
'LOADOPTS =',
'CDEFS = -DAdd_'
- ])
+ ])
- #with working_dir('src'):
with open('make.inc', 'w') as fh:
fh.write('\n'.join(makefile_inc))
+ mkdirp(os.path.join(self.stage.source_path, 'lib'))
make("lib", parallel=False)
# FIXME:
@@ -83,9 +96,10 @@ class SuperluDist(Package):
mkdirp(headers_location)
mkdirp(prefix.lib)
- headers = glob.glob(join_path(self.stage.source_path, 'SRC','*.h'))
+ headers = glob.glob(join_path(self.stage.source_path, 'SRC', '*.h'))
for h in headers:
- install(h,headers_location)
+ install(h, headers_location)
- superludist_lib = join_path(self.stage.source_path, 'lib/libsuperlu_dist.a')
- install(superludist_lib,self.prefix.lib)
+ superludist_lib = join_path(self.stage.source_path,
+ 'lib/libsuperlu_dist.a')
+ install(superludist_lib, self.prefix.lib)
diff --git a/var/spack/repos/builtin/packages/superlu-mt/package.py b/var/spack/repos/builtin/packages/superlu-mt/package.py
new file mode 100644
index 0000000000..ea94c2d4c5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/superlu-mt/package.py
@@ -0,0 +1,136 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import glob
+import os
+
+
+class SuperluMt(Package):
+ """SuperLU is a general purpose library for the direct solution of large,
+ sparse, nonsymmetric systems of linear equations on high performance
+ machines. SuperLU_MT is designed for shared memory parallel machines."""
+
+ homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/#superlu_mt"
+ url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_mt_3.1.tar.gz"
+
+ version('3.1', '06ac62f1b4b7d17123fffa0d0c315e91')
+
+ variant('blas', default=True,
+ description='Build with external BLAS library')
+
+ # Must choose one or the other
+ variant('openmp', default=False, description='Build with OpenMP support')
+ variant('pthread', default=True,
+ description='Build with POSIX threads support')
+
+ # NOTE: must link with a single-threaded BLAS library
+ depends_on('blas', when='+blas')
+
+ # Cannot be built in parallel
+ parallel = False
+
+ def configure(self, spec):
+ # Validate chosen variants
+ if '+openmp' in spec and '+pthread' in spec:
+ msg = 'You cannot choose both +openmp and +pthread'
+ raise RuntimeError(msg)
+ if '~openmp' in spec and '~pthread' in spec:
+ msg = 'You must choose either +openmp or +pthread'
+ raise RuntimeError(msg)
+
+ # List of configuration options
+ config = []
+
+ # The machine (platform) identifier to append to the library names
+ if '+openmp' in spec:
+ # OpenMP
+ config.extend([
+ 'PLAT = _OPENMP',
+ 'TMGLIB = libtmglib.a',
+ 'MPLIB = {0}'.format(self.compiler.openmp_flag),
+ 'CFLAGS = {0}'.format(self.compiler.openmp_flag),
+ 'FFLAGS = {0}'.format(self.compiler.openmp_flag)
+ ])
+ elif '+pthread' in spec:
+ # POSIX threads
+ config.extend([
+ 'PLAT = _PTHREAD',
+ 'TMGLIB = libtmglib$(PLAT).a',
+ 'MPLIB = -lpthread'
+ ])
+
+ # The BLAS library
+ # NOTE: must link with a single-threaded BLAS library
+ if '+blas' in spec:
+ config.extend([
+ 'BLASDEF = -DUSE_VENDOR_BLAS',
+ 'BLASLIB = {0}'.format(spec['blas'].blas_libs.ld_flags)
+ ])
+ else:
+ config.append('BLASLIB = ../lib/libblas$(PLAT).a')
+
+ # Generic options
+ config.extend([
+ # The name of the libraries to be created/linked to
+ 'SUPERLULIB = libsuperlu_mt$(PLAT).a',
+ 'MATHLIB = -lm',
+ # The archiver and the flag(s) to use when building archives
+ 'ARCH = ar',
+ 'ARCHFLAGS = cr',
+ 'RANLIB = {0}'.format('ranlib' if which('ranlib') else 'echo'),
+ # Definitions used by CPP
+ 'PREDEFS = -D_$(PLAT)',
+ # Compilers and flags
+ 'CC = {0}'.format(os.environ['CC']),
+ 'CFLAGS += $(PREDEFS) -D_LONGINT',
+ 'NOOPTS = -O0',
+ 'FORTRAN = {0}'.format(os.environ['FC']),
+ 'LOADER = {0}'.format(os.environ['CC']),
+ # C preprocessor defs for compilation
+ 'CDEFS = -DAdd_'
+ ])
+
+ # Write configuration options to include file
+ with open('make.inc', 'w') as inc:
+ for option in config:
+ inc.write('{0}\n'.format(option))
+
+ def install(self, spec, prefix):
+ # Set up make include file manually
+ self.configure(spec)
+
+ # BLAS needs to be compiled separately if using internal BLAS library
+ if '+blas' not in spec:
+ make('blaslib')
+
+ make()
+
+ # Install manually
+ install_tree('lib', prefix.lib)
+
+ headers = glob.glob(join_path('SRC', '*.h'))
+ mkdir(prefix.include)
+ for h in headers:
+ install(h, prefix.include)
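
The superlu-mt recipe above refuses specs that enable both threading models and then writes the chosen PLAT/MPLIB lines into make.inc. A minimal plain-Python sketch of that validation and file generation, outside of Spack (the -fopenmp value stands in for the compiler's OpenMP flag and the output path is a placeholder):

# Sketch only: reject conflicting threading variants and write the
# corresponding make.inc fragment.
def write_make_inc(variants, path='/tmp/make.inc.example'):
    if 'openmp' in variants and 'pthread' in variants:
        raise RuntimeError('You cannot choose both +openmp and +pthread')
    if 'openmp' not in variants and 'pthread' not in variants:
        raise RuntimeError('You must choose either +openmp or +pthread')

    if 'openmp' in variants:
        config = ['PLAT = _OPENMP', 'MPLIB = -fopenmp']
    else:
        config = ['PLAT = _PTHREAD', 'MPLIB = -lpthread']

    with open(path, 'w') as inc:
        inc.write('\n'.join(config) + '\n')


write_make_inc({'pthread'})
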
diff --git a/var/spack/repos/builtin/packages/superlu/package.py b/var/spack/repos/builtin/packages/superlu/package.py
new file mode 100644
index 0000000000..829949e504
--- /dev/null
+++ b/var/spack/repos/builtin/packages/superlu/package.py
@@ -0,0 +1,120 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import glob
+import os
+
+
+class Superlu(Package):
+ """SuperLU is a general purpose library for the direct solution of large,
+ sparse, nonsymmetric systems of linear equations on high performance
+ machines. SuperLU is designed for sequential machines."""
+
+ homepage = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/#superlu"
+ url = "http://crd-legacy.lbl.gov/~xiaoye/SuperLU/superlu_5.2.1.tar.gz"
+
+ version('5.2.1', '3a1a9bff20cb06b7d97c46d337504447')
+ version('4.3', 'b72c6309f25e9660133007b82621ba7c')
+
+ variant('fpic', default=False,
+ description='Build with position independent code')
+
+ depends_on('cmake', when='@5.2.1:', type='build')
+ depends_on('blas')
+
+ # CMake installation method
+ def install(self, spec, prefix):
+ cmake_args = [
+ '-Denable_blaslib=OFF',
+ '-DBLAS_blas_LIBRARY={0}'.format(spec['blas'].blas_libs.joined())
+ ]
+
+ if '+fpic' in spec:
+ cmake_args.extend([
+ '-DCMAKE_POSITION_INDEPENDENT_CODE=ON'
+ ])
+
+ cmake_args.extend(std_cmake_args)
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *cmake_args)
+ make()
+ make('install')
+
+ # Pre-cmake installation method
+ @when('@4.3')
+ def install(self, spec, prefix):
+ config = []
+
+ # Define make.inc file
+ config.extend([
+ 'PLAT = _x86_64',
+ 'SuperLUroot = %s' % self.stage.source_path,
+ # 'SUPERLULIB = $(SuperLUroot)/lib/libsuperlu$(PLAT).a',
+ 'SUPERLULIB = $(SuperLUroot)/lib/libsuperlu_{0}.a' \
+ .format(self.spec.version),
+ 'BLASDEF = -DUSE_VENDOR_BLAS',
+ 'BLASLIB = {0}'.format(spec['blas'].blas_libs.ld_flags),
+ # or BLASLIB = -L/usr/lib64 -lblas
+ 'TMGLIB = libtmglib.a',
+ 'LIBS = $(SUPERLULIB) $(BLASLIB)',
+ 'ARCH = ar',
+ 'ARCHFLAGS = cr',
+ 'RANLIB = {0}'.format('ranlib' if which('ranlib') else 'echo'),
+ 'CC = {0}'.format(os.environ['CC']),
+ 'FORTRAN = {0}'.format(os.environ['FC']),
+ 'LOADER = {0}'.format(os.environ['CC']),
+ 'CDEFS = -DAdd_'
+ ])
+
+ if '+fpic' in spec:
+ config.extend([
+ # Use these lines instead when pic_flag capability arrives
+ 'CFLAGS = -O3 {0}'.format(self.compiler.pic_flag),
+ 'NOOPTS = {0}'.format(self.compiler.pic_flag),
+ 'FFLAGS = -O2 {0}'.format(self.compiler.pic_flag),
+ 'LOADOPTS = {0}'.format(self.compiler.pic_flag)
+ ])
+ else:
+ config.extend([
+ 'CFLAGS = -O3',
+ 'NOOPTS = ',
+ 'FFLAGS = -O2',
+ 'LOADOPTS = '
+ ])
+
+ # Write configuration options to make.inc file
+ with open('make.inc', 'w') as inc:
+ for option in config:
+ inc.write('{0}\n'.format(option))
+
+ make(parallel=False)
+
+ # Install manually
+ install_tree('lib', prefix.lib)
+ headers = glob.glob(join_path('SRC', '*.h'))
+ mkdir(prefix.include)
+ for h in headers:
+ install(h, prefix.include)
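
The superlu recipe above carries two install methods; Spack's @when('@4.3') multimethod selects the hand-written make.inc path for the old release and the CMake path otherwise. A rough plain-Python illustration of that version-based dispatch (the strings only summarize the two build paths):

# Sketch only: pick a build path based on the requested version, as the
# @when-decorated install() methods above effectively do.
def choose_install(version):
    if version == '4.3':
        return 'write make.inc by hand, then make'
    return 'cmake .. ; make ; make install'


print(choose_install('5.2.1'))
print(choose_install('4.3'))
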
diff --git a/var/spack/repos/builtin/packages/swiftsim/package.py b/var/spack/repos/builtin/packages/swiftsim/package.py
new file mode 100644
index 0000000000..1c424b5ca0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/swiftsim/package.py
@@ -0,0 +1,72 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+                elif l.strip().startswith("CONLY ="):
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import llnl.util.tty as tty
+
+
+class Swiftsim(AutotoolsPackage):
+ """SPH With Inter-dependent Fine-grained Tasking (SWIFT) provides
+    astrophysicists with a state-of-the-art framework to perform
+    particle-based simulations.
+ """
+
+ homepage = 'http://icc.dur.ac.uk/swift/'
+ url = 'http://gitlab.cosma.dur.ac.uk/swift/swiftsim/repository/archive.tar.gz?ref=v0.3.0'
+
+ version('0.3.0', git='https://gitlab.cosma.dur.ac.uk/swift/swiftsim.git',
+ commit='254cc1b563b2f88ddcf437b1f71da123bb9db733')
+
+ variant('mpi', default=True,
+ description='Enable distributed memory parallelism')
+
+ # Build dependencies
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
+ depends_on('m4', type='build')
+ # link-time / run-time dependencies
+ depends_on('mpi', when='+mpi')
+ depends_on('metis')
+ depends_on('hdf5~mpi', when='~mpi')
+ depends_on('hdf5+mpi', when='+mpi')
+
+ def setup_environment(self, spack_env, run_env):
+ # Needed to be able to download from the Durham gitlab repository
+ tty.warn('Setting "GIT_SSL_NO_VERIFY=1"')
+ tty.warn('This is needed to clone SWIFT repository')
+ spack_env.set('GIT_SSL_NO_VERIFY', 1)
+
+ def autoreconf(self, spec, prefix):
+ libtoolize()
+ aclocal()
+ autoconf()
+ autogen = Executable('./autogen.sh')
+ autogen()
+
+ def configure_args(self):
+ return ['--prefix=%s' % self.prefix,
+ '--enable-mpi' if '+mpi' in self.spec else '--disable-mpi',
+ '--with-metis={0}'.format(self.spec['metis'].prefix),
+ '--enable-optimization']
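
configure_args() above turns the +mpi variant into --enable-mpi/--disable-mpi and points --with-metis at the metis installation. A standalone sketch of that argument assembly (the metis prefix below is a placeholder):

# Sketch only: assemble swiftsim's configure arguments from the +mpi
# variant and the metis prefix.
def swiftsim_configure_args(with_mpi, metis_prefix):
    return ['--enable-mpi' if with_mpi else '--disable-mpi',
            '--with-metis={0}'.format(metis_prefix),
            '--enable-optimization']


print(swiftsim_configure_args(True, '/opt/metis'))
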
diff --git a/var/spack/repos/builtin/packages/swig/package.py b/var/spack/repos/builtin/packages/swig/package.py
index f552621821..b43246dcee 100644
--- a/var/spack/repos/builtin/packages/swig/package.py
+++ b/var/spack/repos/builtin/packages/swig/package.py
@@ -22,9 +22,9 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
+
class Swig(Package):
"""SWIG is an interface compiler that connects programs written in
C and C++ with scripting languages such as Perl, Python, Ruby,
@@ -38,6 +38,7 @@ class Swig(Package):
homepage = "http://www.swig.org"
url = "http://prdownloads.sourceforge.net/swig/swig-3.0.8.tar.gz"
+ version('3.0.10', 'bb4ab8047159469add7d00910e203124')
version('3.0.8', 'c96a1d5ecb13d38604d7e92148c73c97')
version('3.0.2', '62f9b0d010cef36a13a010dc530d0d41')
version('2.0.12', 'c3fb0b2d710cc82ed0154b91e43085a4')
diff --git a/var/spack/repos/builtin/packages/symengine/package.py b/var/spack/repos/builtin/packages/symengine/package.py
new file mode 100644
index 0000000000..f3fc13474c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/symengine/package.py
@@ -0,0 +1,113 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Symengine(Package):
+ """SymEngine is a fast symbolic manipulation library, written in C++."""
+
+ homepage = "https://github.com/symengine/symengine"
+ url = "https://github.com/symengine/symengine/archive/v0.2.0.tar.gz"
+
+ version('0.2.0', '45401561add36a13c1f0b0c5f8d7422d')
+ version('0.1.0', '41ad7daed61fc5a77c285eb6c7303425')
+ version('develop', git='https://github.com/symengine/symengine.git')
+
+ variant('flint', default=True,
+ description='Compile with Flint integer library')
+ variant('mpc', default=True,
+ description='Compile with MPC library')
+ variant('mpfr', default=True,
+ description='Compile with MPFR library')
+ variant('piranha', default=False,
+ description='Compile with Piranha integer library')
+ variant('thread_safe', default=True,
+ description='Enable thread safety option')
+ variant('openmp', default=False,
+ description='Enable OpenMP support')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+
+ # Build dependencies
+ depends_on('cmake', type='build')
+
+ # Other dependencies
+ depends_on('gmp') # mpir is a drop-in replacement for this
+ depends_on('mpc', when='+mpc') # Could also be built against mpir
+ depends_on('mpfr', when='+mpfr') # Could also be built against mpir
+ depends_on('flint', when='+flint') # Could also be built against mpir
+ depends_on('piranha', when='+piranha~flint') # Could also be built against mpir # NOQA
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+
+ # CMAKE_BUILD_TYPE should be Debug | Release
+ for word in options[:]:
+ if word.startswith('-DCMAKE_BUILD_TYPE'):
+ options.remove(word)
+
+ # See https://github.com/symengine/symengine/blob/master/README.md
+ # for build options
+ options.extend([
+ '-DCMAKE_BUILD_TYPE=Release',
+ '-DWITH_SYMENGINE_RCP:BOOL=ON',
+ '-DWITH_SYMENGINE_THREAD_SAFE:BOOL=%s' % (
+                'ON' if ('+thread_safe' in spec or
+                         '+openmp' in spec) else 'OFF'),
+ '-DBUILD_TESTS:BOOL=ON',
+ '-DBUILD_BENCHMARKS:BOOL=ON',
+ '-DWITH_MPC:BOOL=%s' % (
+ 'ON' if '+mpc' in spec else 'OFF'),
+ '-DWITH_MPFR:BOOL=%s' % (
+ 'ON' if '+mpfr' in spec else 'OFF'),
+ '-DINTEGER_CLASS:STRING=gmp',
+ '-DWITH_OPENMP:BOOL=%s' % (
+ 'ON' if '+openmp' in spec else 'OFF'),
+ '-DBUILD_SHARED_LIBS:BOOL=%s' % (
+ 'ON' if '+shared' in spec else 'OFF'),
+ ])
+
+ if '+flint' in spec:
+ options.extend([
+ '-DWITH_FLINT:BOOL=ON',
+ '-DINTEGER_CLASS:STRING=flint'
+ ])
+ elif '+piranha' in spec:
+ options.extend([
+ '-DWITH_PIRANHA:BOOL=ON',
+ '-DINTEGER_CLASS:STRING=piranha'
+ ])
+ else:
+ options.extend([
+ '-DINTEGER_CLASS:STRING=gmp'
+ ])
+
+ with working_dir('spack-build', create=True):
+ cmake('..', *options)
+
+ make()
+ make('install')
+ if self.run_tests:
+ ctest()
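
The symengine recipe above maps boolean variants onto -DWITH_*:BOOL= CMake flags and derives INTEGER_CLASS from the flint/piranha/gmp choice. A condensed standalone sketch of that mapping (it omits the RCP and thread-safety flags and is an illustration, not the recipe itself):

# Sketch only: translate enabled variants into the CMake options built
# up by the SymEngine recipe above.
def symengine_cmake_options(variants):
    def onoff(name):
        return 'ON' if name in variants else 'OFF'

    options = [
        '-DCMAKE_BUILD_TYPE=Release',
        '-DWITH_MPC:BOOL=' + onoff('mpc'),
        '-DWITH_MPFR:BOOL=' + onoff('mpfr'),
        '-DWITH_OPENMP:BOOL=' + onoff('openmp'),
        '-DBUILD_SHARED_LIBS:BOOL=' + onoff('shared'),
    ]
    if 'flint' in variants:
        options += ['-DWITH_FLINT:BOOL=ON', '-DINTEGER_CLASS:STRING=flint']
    elif 'piranha' in variants:
        options += ['-DWITH_PIRANHA:BOOL=ON',
                    '-DINTEGER_CLASS:STRING=piranha']
    else:
        options.append('-DINTEGER_CLASS:STRING=gmp')
    return options


print(symengine_cmake_options({'mpc', 'mpfr', 'flint', 'shared'}))
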
diff --git a/var/spack/repos/builtin/packages/sympol/lrs_mp_close.patch b/var/spack/repos/builtin/packages/sympol/lrs_mp_close.patch
new file mode 100644
index 0000000000..503a61ff65
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sympol/lrs_mp_close.patch
@@ -0,0 +1,10 @@
+--- old/sympol/raycomputationlrs.cpp
++++ new/sympol/raycomputationlrs.cpp
+@@ -66,7 +66,6 @@
+ return true;
+ }
+
+- lrs_mp_close();
+
+ if (RayComputationLRS::ms_fIn != NULL) {
+ if (std::fclose(RayComputationLRS::ms_fIn)) {
diff --git a/var/spack/repos/builtin/packages/sympol/package.py b/var/spack/repos/builtin/packages/sympol/package.py
new file mode 100644
index 0000000000..7ce4995f03
--- /dev/null
+++ b/var/spack/repos/builtin/packages/sympol/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Sympol(Package):
+ """SymPol is a C++ tool to work with symmetric polyhedra"""
+ homepage = "http://www.math.uni-rostock.de/~rehn/software/sympol.html"
+ url = "http://www.math.uni-rostock.de/~rehn/software/sympol-0.1.8.tar.gz"
+
+ version('0.1.8', '7cba1997f8532c754cb7259bf70caacb')
+
+ depends_on("cmake", type='build')
+
+ depends_on("bliss")
+ depends_on("boost")
+ depends_on("gmp")
+ depends_on("lrslib")
+
+ patch("lrs_mp_close.patch")
+
+ def install(self, spec, prefix):
+ cmake(".", *std_cmake_args)
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/szip/package.py b/var/spack/repos/builtin/packages/szip/package.py
index fd3a2a209d..91934f7d03 100644
--- a/var/spack/repos/builtin/packages/szip/package.py
+++ b/var/spack/repos/builtin/packages/szip/package.py
@@ -24,22 +24,22 @@
##############################################################################
from spack import *
-class Szip(Package):
- """Szip is an implementation of the extended-Rice lossless compression algorithm.
- It provides lossless compression of scientific data, and is provided with HDF
- software products."""
+
+class Szip(AutotoolsPackage):
+ """Szip is an implementation of the extended-Rice lossless
+ compression algorithm.
+
+ It provides lossless compression of scientific data, and is
+ provided with HDF software products.
+ """
homepage = "https://www.hdfgroup.org/doc_resource/SZIP/"
- url = "http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz"
+ url = "http://www.hdfgroup.org/ftp/lib-external/szip/2.1/src/szip-2.1.tar.gz"
version('2.1', '902f831bcefb69c6b635374424acbead')
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix,
- '--enable-production',
- '--enable-shared',
- '--enable-static',
- '--enable-encoding')
-
- make()
- make("install")
+ def configure_args(self):
+ return ['--enable-production',
+ '--enable-shared',
+ '--enable-static',
+ '--enable-encoding']
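
The szip conversion above drops the hand-written install() because AutotoolsPackage drives configure, make and make install itself and only asks the package for extra configure arguments. A toy sketch of how such a driver could assemble the configure invocation (this illustrates the pattern, not Spack's actual implementation):

# Sketch only: combine a fixed --prefix with package-provided extra
# arguments, the way an autotools-style base class would.
def configure_command(prefix, extra_args):
    return ['./configure', '--prefix=' + prefix] + list(extra_args)


szip_args = ['--enable-production', '--enable-shared',
             '--enable-static', '--enable-encoding']
print(' '.join(configure_command('/opt/szip-2.1', szip_args)))
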
diff --git a/var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch b/var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch
new file mode 100644
index 0000000000..e5c183b720
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tar/gnutar-configure-xattrs.patch
@@ -0,0 +1,482 @@
+diff --git a/Makefile.in b/Makefile.in
+index f9f1d1d..b403e46 100644
+--- a/Makefile.in
++++ b/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+@@ -1583,9 +1583,10 @@ distcheck: dist
+ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \
+ && am__cwd=`pwd` \
+ && $(am__cd) $(distdir)/_build \
+- && ../configure --srcdir=.. --prefix="$$dc_install_base" \
++ && ../configure \
+ $(AM_DISTCHECK_CONFIGURE_FLAGS) \
+ $(DISTCHECK_CONFIGURE_FLAGS) \
++ --srcdir=.. --prefix="$$dc_install_base" \
+ && $(MAKE) $(AM_MAKEFLAGS) \
+ && $(MAKE) $(AM_MAKEFLAGS) dvi \
+ && $(MAKE) $(AM_MAKEFLAGS) check \
+diff --git a/aclocal.m4 b/aclocal.m4
+index 0e09589..804c0b3 100644
+--- a/aclocal.m4
++++ b/aclocal.m4
+@@ -1,4 +1,4 @@
+-# generated automatically by aclocal 1.14 -*- Autoconf -*-
++# generated automatically by aclocal 1.14.1 -*- Autoconf -*-
+
+ # Copyright (C) 1996-2013 Free Software Foundation, Inc.
+
+@@ -35,7 +35,7 @@ AC_DEFUN([AM_AUTOMAKE_VERSION],
+ [am__api_version='1.14'
+ dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to
+ dnl require some minimum version. Point them to the right macro.
+-m4_if([$1], [1.14], [],
++m4_if([$1], [1.14.1], [],
+ [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl
+ ])
+
+@@ -51,7 +51,7 @@ m4_define([_AM_AUTOCONF_VERSION], [])
+ # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced.
+ # This function is AC_REQUIREd by AM_INIT_AUTOMAKE.
+ AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION],
+-[AM_AUTOMAKE_VERSION([1.14])dnl
++[AM_AUTOMAKE_VERSION([1.14.1])dnl
+ m4_ifndef([AC_AUTOCONF_VERSION],
+ [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl
+ _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))])
+diff --git a/config.h.in b/config.h.in
+index 6646ea5..8571ef4 100644
+--- a/config.h.in
++++ b/config.h.in
+@@ -617,7 +617,7 @@
+ */
+ #undef HAVE_ALLOCA_H
+
+-/* define to 1 if we have <attr/xattr.h> header */
++/* Define to 1 if you have the <attr/xattr.h> header file. */
+ #undef HAVE_ATTR_XATTR_H
+
+ /* Define to 1 if you have the <bp-sym.h> header file. */
+@@ -924,21 +924,12 @@
+ /* Define to 1 if you have the <features.h> header file. */
+ #undef HAVE_FEATURES_H
+
+-/* Define to 1 if you have the `fgetxattr' function. */
+-#undef HAVE_FGETXATTR
+-
+-/* Define to 1 if you have the `flistxattr' function. */
+-#undef HAVE_FLISTXATTR
+-
+ /* Define to 1 if you have the `flockfile' function. */
+ #undef HAVE_FLOCKFILE
+
+ /* Define to 1 if fseeko (and presumably ftello) exists and is declared. */
+ #undef HAVE_FSEEKO
+
+-/* Define to 1 if you have the `fsetxattr' function. */
+-#undef HAVE_FSETXATTR
+-
+ /* Define to 1 if you have the `fstatat' function. */
+ #undef HAVE_FSTATAT
+
+@@ -990,9 +981,6 @@
+ /* Define to 1 if you have the `gettimeofday' function. */
+ #undef HAVE_GETTIMEOFDAY
+
+-/* Define to 1 if you have the `getxattr' function. */
+-#undef HAVE_GETXATTR
+-
+ /* Define to 1 if you have the `grantpt' function. */
+ #undef HAVE_GRANTPT
+
+@@ -1045,12 +1033,6 @@
+ /* Define to 1 if you have the `lchown' function. */
+ #undef HAVE_LCHOWN
+
+-/* Define to 1 if you have the `lgetxattr' function. */
+-#undef HAVE_LGETXATTR
+-
+-/* Define to 1 if you have the `attr' library (-lattr). */
+-#undef HAVE_LIBATTR
+-
+ /* Define to 1 if you have the <libgen.h> header file. */
+ #undef HAVE_LIBGEN_H
+
+@@ -1069,12 +1051,6 @@
+ /* Define to 1 if you have the <linux/fd.h> header file. */
+ #undef HAVE_LINUX_FD_H
+
+-/* Define to 1 if you have the `listxattr' function. */
+-#undef HAVE_LISTXATTR
+-
+-/* Define to 1 if you have the `llistxattr' function. */
+-#undef HAVE_LLISTXATTR
+-
+ /* Define to 1 if you have the <locale.h> header file. */
+ #undef HAVE_LOCALE_H
+
+@@ -1087,9 +1063,6 @@
+ /* Define to 1 if the system has the type 'long long int'. */
+ #undef HAVE_LONG_LONG_INT
+
+-/* Define to 1 if you have the `lsetxattr' function. */
+-#undef HAVE_LSETXATTR
+-
+ /* Define to 1 if you have the `lstat' function. */
+ #undef HAVE_LSTAT
+
+@@ -1867,9 +1840,6 @@
+ /* Define to 1 if you have the `setlocale' function. */
+ #undef HAVE_SETLOCALE
+
+-/* Define to 1 if you have the `setxattr' function. */
+-#undef HAVE_SETXATTR
+-
+ /* Define to 1 if you have the <sgtty.h> header file. */
+ #undef HAVE_SGTTY_H
+
+@@ -2074,7 +2044,7 @@
+ /* Define to 1 if you have the <sys/wait.h> header file. */
+ #undef HAVE_SYS_WAIT_H
+
+-/* define to 1 if we have <sys/xattr.h> header */
++/* Define to 1 if you have the <sys/xattr.h> header file. */
+ #undef HAVE_SYS_XATTR_H
+
+ /* Define if struct tm has the tm_gmtoff member. */
+diff --git a/configure b/configure
+index cfdd721..8cf6e91 100755
+--- a/configure
++++ b/configure
+@@ -663,8 +663,6 @@ RSH
+ LIBOBJS
+ TAR_COND_GRANTPT_FALSE
+ TAR_COND_GRANTPT_TRUE
+-TAR_LIB_ATTR_FALSE
+-TAR_LIB_ATTR_TRUE
+ TAR_COND_XATTR_H_FALSE
+ TAR_COND_XATTR_H_TRUE
+ GNULIB_TEST_WARN_CFLAGS
+@@ -34994,41 +34992,6 @@ else
+ TAR_COND_XATTR_H_FALSE=
+ fi
+
+- if false; then
+- TAR_LIB_ATTR_TRUE=
+- TAR_LIB_ATTR_FALSE='#'
+-else
+- TAR_LIB_ATTR_TRUE='#'
+- TAR_LIB_ATTR_FALSE=
+-fi
+-
+- if test "$ac_cv_header_sys_xattr_h" = yes; then
+- for ac_func in getxattr fgetxattr lgetxattr \
+- setxattr fsetxattr lsetxattr \
+- listxattr flistxattr llistxattr
+-do :
+- as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
+-ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
+-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+- cat >>confdefs.h <<_ACEOF
+-#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
+-_ACEOF
+- # only when functions are present
+-
+-$as_echo "#define HAVE_SYS_XATTR_H 1" >>confdefs.h
+-
+- if test "$with_xattrs" != no; then
+-
+-$as_echo "#define HAVE_XATTRS /**/" >>confdefs.h
+-
+- fi
+-
+-fi
+-done
+-
+- fi
+-
+- # If <sys/xattr.h> is not found, then check for <attr/xattr.h>
+ if test "$ac_cv_header_sys_xattr_h" != yes; then
+ for ac_header in attr/xattr.h
+ do :
+@@ -35050,13 +35013,20 @@ else
+ TAR_COND_XATTR_H_FALSE=
+ fi
+
+- { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgetxattr in -lattr" >&5
+-$as_echo_n "checking for fgetxattr in -lattr... " >&6; }
+-if ${ac_cv_lib_attr_fgetxattr+:} false; then :
++ fi
++
++ if test "$with_xattrs" != no; then
++ for i in getxattr fgetxattr lgetxattr \
++ setxattr fsetxattr lsetxattr \
++ listxattr flistxattr llistxattr
++ do
++ as_ac_Search=`$as_echo "ac_cv_search_$i" | $as_tr_sh`
++{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing $i" >&5
++$as_echo_n "checking for library containing $i... " >&6; }
++if eval \${$as_ac_Search+:} false; then :
+ $as_echo_n "(cached) " >&6
+ else
+- ac_check_lib_save_LIBS=$LIBS
+-LIBS="-lattr $LIBS"
++ ac_func_search_save_LIBS=$LIBS
+ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ /* end confdefs.h. */
+
+@@ -35066,67 +35036,56 @@ cat confdefs.h - <<_ACEOF >conftest.$ac_ext
+ #ifdef __cplusplus
+ extern "C"
+ #endif
+-char fgetxattr ();
++char $i ();
+ int
+ main ()
+ {
+-return fgetxattr ();
++return $i ();
+ ;
+ return 0;
+ }
+ _ACEOF
+-if ac_fn_c_try_link "$LINENO"; then :
+- ac_cv_lib_attr_fgetxattr=yes
+-else
+- ac_cv_lib_attr_fgetxattr=no
++for ac_lib in '' attr; do
++ if test -z "$ac_lib"; then
++ ac_res="none required"
++ else
++ ac_res=-l$ac_lib
++ LIBS="-l$ac_lib $ac_func_search_save_LIBS"
++ fi
++ if ac_fn_c_try_link "$LINENO"; then :
++ eval "$as_ac_Search=\$ac_res"
+ fi
+ rm -f core conftest.err conftest.$ac_objext \
+- conftest$ac_exeext conftest.$ac_ext
+-LIBS=$ac_check_lib_save_LIBS
+-fi
+-{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_attr_fgetxattr" >&5
+-$as_echo "$ac_cv_lib_attr_fgetxattr" >&6; }
+-if test "x$ac_cv_lib_attr_fgetxattr" = xyes; then :
+- cat >>confdefs.h <<_ACEOF
+-#define HAVE_LIBATTR 1
+-_ACEOF
+-
+- LIBS="-lattr $LIBS"
+-
++ conftest$ac_exeext
++ if eval \${$as_ac_Search+:} false; then :
++ break
+ fi
++done
++if eval \${$as_ac_Search+:} false; then :
+
+- if test "$ac_cv_lib_attr_fgetxattr" = yes; then
+- TAR_LIB_ATTR_TRUE=
+- TAR_LIB_ATTR_FALSE='#'
+ else
+- TAR_LIB_ATTR_TRUE='#'
+- TAR_LIB_ATTR_FALSE=
++ eval "$as_ac_Search=no"
++fi
++rm conftest.$ac_ext
++LIBS=$ac_func_search_save_LIBS
+ fi
++eval ac_res=\$$as_ac_Search
++ { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5
++$as_echo "$ac_res" >&6; }
++eval ac_res=\$$as_ac_Search
++if test "$ac_res" != no; then :
++ test "$ac_res" = "none required" || LIBS="$ac_res $LIBS"
+
+- if test "$ac_cv_header_attr_xattr_h" = yes; then
+- for ac_func in getxattr fgetxattr lgetxattr \
+- setxattr fsetxattr lsetxattr \
+- listxattr flistxattr llistxattr
+-do :
+- as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh`
+-ac_fn_c_check_func "$LINENO" "$ac_func" "$as_ac_var"
+-if eval test \"x\$"$as_ac_var"\" = x"yes"; then :
+- cat >>confdefs.h <<_ACEOF
+-#define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1
+-_ACEOF
+- # only when functions are present
++fi
+
+-$as_echo "#define HAVE_ATTR_XATTR_H 1" >>confdefs.h
++ eval found=\$ac_cv_search_$i
++ test "$found" = "no" && break
++ done
+
+- if test "$with_xattrs" != no; then
++ if test "$found" != no; then
+
+ $as_echo "#define HAVE_XATTRS /**/" >>confdefs.h
+
+- fi
+-
+-fi
+-done
+-
+ fi
+ fi
+
+@@ -38187,18 +38146,10 @@ if test -z "${TAR_COND_XATTR_H_TRUE}" && test -z "${TAR_COND_XATTR_H_FALSE}"; th
+ as_fn_error $? "conditional \"TAR_COND_XATTR_H\" was never defined.
+ Usually this means the macro was only invoked conditionally." "$LINENO" 5
+ fi
+-if test -z "${TAR_LIB_ATTR_TRUE}" && test -z "${TAR_LIB_ATTR_FALSE}"; then
+- as_fn_error $? "conditional \"TAR_LIB_ATTR\" was never defined.
+-Usually this means the macro was only invoked conditionally." "$LINENO" 5
+-fi
+ if test -z "${TAR_COND_XATTR_H_TRUE}" && test -z "${TAR_COND_XATTR_H_FALSE}"; then
+ as_fn_error $? "conditional \"TAR_COND_XATTR_H\" was never defined.
+ Usually this means the macro was only invoked conditionally." "$LINENO" 5
+ fi
+-if test -z "${TAR_LIB_ATTR_TRUE}" && test -z "${TAR_LIB_ATTR_FALSE}"; then
+- as_fn_error $? "conditional \"TAR_LIB_ATTR\" was never defined.
+-Usually this means the macro was only invoked conditionally." "$LINENO" 5
+-fi
+ if test -z "${TAR_COND_GRANTPT_TRUE}" && test -z "${TAR_COND_GRANTPT_FALSE}"; then
+ as_fn_error $? "conditional \"TAR_COND_GRANTPT\" was never defined.
+ Usually this means the macro was only invoked conditionally." "$LINENO" 5
+diff --git a/doc/Makefile.in b/doc/Makefile.in
+index ca44f1a..42a06b3 100644
+--- a/doc/Makefile.in
++++ b/doc/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+diff --git a/gnu/Makefile.in b/gnu/Makefile.in
+index 03eed58..d908a03 100644
+--- a/gnu/Makefile.in
++++ b/gnu/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+diff --git a/lib/Makefile.in b/lib/Makefile.in
+index 41a9aca..1254b8f 100644
+--- a/lib/Makefile.in
++++ b/lib/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+diff --git a/rmt/Makefile.in b/rmt/Makefile.in
+index c3f2509..0f3dca4 100644
+--- a/rmt/Makefile.in
++++ b/rmt/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+diff --git a/scripts/Makefile.in b/scripts/Makefile.in
+index 57c0d0d..bf344ed 100644
+--- a/scripts/Makefile.in
++++ b/scripts/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+diff --git a/src/Makefile.in b/src/Makefile.in
+index 6f9a592..fc6fc87 100644
+--- a/src/Makefile.in
++++ b/src/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+@@ -99,7 +99,6 @@ POST_UNINSTALL = :
+ build_triplet = @build@
+ host_triplet = @host@
+ bin_PROGRAMS = tar$(EXEEXT)
+-@TAR_LIB_ATTR_TRUE@am__append_1 = -lattr
+ subdir = src
+ DIST_COMMON = $(srcdir)/Makefile.in $(srcdir)/Makefile.am \
+ $(top_srcdir)/build-aux/depcomp $(noinst_HEADERS)
+@@ -254,7 +253,7 @@ am__DEPENDENCIES_2 = ../lib/libtar.a ../gnu/libgnu.a \
+ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1)
+ tar_DEPENDENCIES = $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_2) \
+ $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1) \
+- $(am__DEPENDENCIES_1) $(am__DEPENDENCIES_1)
++ $(am__DEPENDENCIES_1)
+ AM_V_P = $(am__v_P_@AM_V@)
+ am__v_P_ = $(am__v_P_@AM_DEFAULT_V@)
+ am__v_P_0 = false
+@@ -1225,8 +1224,7 @@ tar_SOURCES = \
+ AM_CPPFLAGS = -I$(top_srcdir)/gnu -I../ -I../gnu -I$(top_srcdir)/lib -I../lib
+ AM_CFLAGS = $(WARN_CFLAGS) $(WERROR_CFLAGS)
+ LDADD = ../lib/libtar.a ../gnu/libgnu.a $(LIBINTL) $(LIBICONV)
+-tar_LDADD = $(LIBS) $(LDADD) $(LIB_CLOCK_GETTIME) $(LIB_EACCESS) \
+- $(LIB_SELINUX) $(am__append_1)
++tar_LDADD = $(LIBS) $(LDADD) $(LIB_CLOCK_GETTIME) $(LIB_EACCESS) $(LIB_SELINUX)
+ all: all-am
+
+ .SUFFIXES:
+diff --git a/tests/Makefile.in b/tests/Makefile.in
+index 6807509..50d7689 100644
+--- a/tests/Makefile.in
++++ b/tests/Makefile.in
+@@ -1,4 +1,4 @@
+-# Makefile.in generated by automake 1.14 from Makefile.am.
++# Makefile.in generated by automake 1.14.1 from Makefile.am.
+ # @configure_input@
+
+ # Copyright (C) 1994-2013 Free Software Foundation, Inc.
+diff --git a/lib/xattr-at.c b/lib/xattr-at.c
+index 443ccae..009bde5 100644
+--- a/lib/xattr-at.c
++++ b/lib/xattr-at.c
+@@ -18,6 +18,11 @@
+
+ #include <config.h>
+
++/* Temporarily don't build. We are unable to build on (probably not only)
++ darwin due to lack of l*xattr callbacks (XATTR_NOFOLLOW is alternative) and
++ different function definitions. */
++#ifdef HAVE_XATTRS
++
+ #include "xattr-at.h"
+ #include "openat.h"
+
+@@ -108,3 +113,5 @@
+ #undef AT_FUNC_RESULT
+ #undef AT_FUNC_POST_FILE_PARAM_DECLS
+ #undef AT_FUNC_POST_FILE_ARGS
++
++#endif
+diff --git a/src/Makefile.am b/src/Makefile.am
+index 82b2d46..42daaef 100644
+--- a/src/Makefile.am
++++ b/src/Makefile.am
+@@ -52,7 +52,3 @@ AM_CFLAGS = $(WARN_CFLAGS) $(WERROR_CFLAGS)
+ LDADD = ../lib/libtar.a ../gnu/libgnu.a $(LIBINTL) $(LIBICONV)
+
+ tar_LDADD = $(LIBS) $(LDADD) $(LIB_CLOCK_GETTIME) $(LIB_EACCESS) $(LIB_SELINUX)
+-
+-if TAR_LIB_ATTR
+-tar_LDADD += -lattr
+-endif
+--
+1.9.3
\ No newline at end of file
diff --git a/var/spack/repos/builtin/packages/tar/package.py b/var/spack/repos/builtin/packages/tar/package.py
index c55b5165bf..7b6313827f 100644
--- a/var/spack/repos/builtin/packages/tar/package.py
+++ b/var/spack/repos/builtin/packages/tar/package.py
@@ -24,14 +24,17 @@
##############################################################################
from spack import *
-class Tar(Package):
- """GNU Tar provides the ability to create tar archives, as well as various other kinds of manipulation."""
+
+class Tar(AutotoolsPackage):
+ """GNU Tar provides the ability to create tar archives, as well as various
+ other kinds of manipulation."""
homepage = "https://www.gnu.org/software/tar/"
- url = "http://ftp.gnu.org/gnu/tar/tar-1.28.tar.gz"
+ url = "https://ftp.gnu.org/gnu/tar/tar-1.28.tar.gz"
+ version('1.29', 'cae466e6e58c7292355e7080248f244db3a4cf755f33f4fa25ca7f9a7ed09af0')
version('1.28', '6ea3dbea1f2b0409b234048e021a9fd7')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make('install')
+ # see http://lists.gnu.org/archive/html/bug-tar/2014-08/msg00001.html and
+ # https://github.com/Homebrew/homebrew-core/commit/aef9a1792de4648d0322b4b04d32287532f046bb
+ # TODO: when=sys.platform=='darwin' ?
+ patch('gnutar-configure-xattrs.patch', when='@1.28')
diff --git a/var/spack/repos/builtin/packages/task/package.py b/var/spack/repos/builtin/packages/task/package.py
index dc52c4f9f6..785023fd03 100644
--- a/var/spack/repos/builtin/packages/task/package.py
+++ b/var/spack/repos/builtin/packages/task/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Task(Package):
"""Feature-rich console based todo list manager"""
homepage = "http://www.taskwarrior.org"
@@ -31,6 +32,7 @@ class Task(Package):
version('2.4.4', '517450c4a23a5842df3e9905b38801b3')
+ depends_on('cmake', type='build')
depends_on("gnutls")
depends_on("libuuid")
# depends_on("gcc@4.8:")
diff --git a/var/spack/repos/builtin/packages/taskd/package.py b/var/spack/repos/builtin/packages/taskd/package.py
index 8a022b4fe9..bfe77aaa5c 100644
--- a/var/spack/repos/builtin/packages/taskd/package.py
+++ b/var/spack/repos/builtin/packages/taskd/package.py
@@ -24,14 +24,16 @@
##############################################################################
from spack import *
+
class Taskd(Package):
"""TaskWarrior task synchronization daemon"""
- # FIXME: add a proper url for your package's homepage here.
+
homepage = "http://www.taskwarrior.org"
url = "http://taskwarrior.org/download/taskd-1.1.0.tar.gz"
version('1.1.0', 'ac855828c16f199bdbc45fbc227388d0')
+ depends_on('cmake', type='build')
depends_on("libuuid")
depends_on("gnutls")
diff --git a/var/spack/repos/builtin/packages/tau/package.py b/var/spack/repos/builtin/packages/tau/package.py
index 3b181f3fa4..991841f137 100644
--- a/var/spack/repos/builtin/packages/tau/package.py
+++ b/var/spack/repos/builtin/packages/tau/package.py
@@ -22,20 +22,18 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-
from spack import *
-
import os
-import os.path
-
+import glob
from llnl.util.filesystem import join_path
+
class Tau(Package):
- """
- A portable profiling and tracing toolkit for performance
+ """A portable profiling and tracing toolkit for performance
analysis of parallel programs written in Fortran, C, C++, UPC,
Java, Python.
"""
+
homepage = "http://www.cs.uoregon.edu/research/tau"
url = "https://www.cs.uoregon.edu/research/tau/tau_releases/tau-2.25.tar.gz"
@@ -45,15 +43,20 @@ class Tau(Package):
version('2.23.1', '6593b47ae1e7a838e632652f0426fe72')
# TODO : shmem variant missing
- variant('download', default=False, description='Downloads and builds various dependencies')
+ variant('download', default=False,
+ description='Downloads and builds various dependencies')
variant('scorep', default=False, description='Activates SCOREP support')
variant('openmp', default=True, description='Use OpenMP threads')
- variant('mpi', default=True, description='Specify use of TAU MPI wrapper library')
+ variant('mpi', default=True,
+ description='Specify use of TAU MPI wrapper library')
variant('phase', default=True, description='Generate phase based profiles')
- variant('comm', default=True, description=' Generate profiles with MPI communicator info')
+ variant('comm', default=True,
+            description='Generate profiles with MPI communicator info')
- # TODO : Try to build direct OTF2 support? Some parts of the OTF support library in TAU are non-conformant,
- # TODO : and fail at compile-time. Further, SCOREP is compiled with OTF2 support.
+    # TODO : Try to build direct OTF2 support? Some parts of the OTF support
+    # TODO : library in TAU are non-conformant, and fail at compile-time.
+    # TODO : Further, SCOREP is compiled with OTF2 support.
depends_on('pdt') # Required for TAU instrumentation
depends_on('scorep', when='+scorep')
depends_on('binutils', when='~download')
@@ -65,13 +68,17 @@ class Tau(Package):
##########
# Selecting a compiler with TAU configure is quite tricky:
- # 1 - compilers are mapped to a given set of strings (and spack cc, cxx, etc. wrappers are not among them)
+ # 1 - compilers are mapped to a given set of strings
+ # (and spack cc, cxx, etc. wrappers are not among them)
# 2 - absolute paths are not allowed
- # 3 - the usual environment variables seems not to be checked ('CC', 'CXX' and 'FC')
- # 4 - if no -cc=<compiler> -cxx=<compiler> is passed tau is built with system compiler silently
+        # 3 - the usual environment variables seem not to be checked
+ # ('CC', 'CXX' and 'FC')
+ # 4 - if no -cc=<compiler> -cxx=<compiler> is passed tau is built with
+ # system compiler silently
# (regardless of what %<compiler> is used in the spec)
#
- # In the following we give TAU what he expects and put compilers into PATH
+        # In the following we give TAU what it expects and put compilers into
+ # PATH
compiler_path = os.path.dirname(self.compiler.cc)
os.environ['PATH'] = ':'.join([compiler_path, os.environ['PATH']])
compiler_options = ['-c++=%s' % self.compiler.cxx_names[0],
@@ -80,7 +87,8 @@ class Tau(Package):
compiler_options.append('-fortran=%s' % self.compiler.fc_names[0])
##########
- # Construct the string of custom compiler flags and append it to compiler related options
+ # Construct the string of custom compiler flags and append it to
+ # compiler related options
useropt = ' '.join(useropt)
useropt = "-useropt=%s" % useropt
compiler_options.append(useropt)
@@ -92,8 +100,9 @@ class Tau(Package):
change_sed_delimiter('@', ';', 'utils/FixMakefile')
change_sed_delimiter('@', ';', 'utils/FixMakefile.sed.default')
- # TAU configure, despite the name , seems to be a manually written script (nothing related to autotools).
- # As such it has a few #peculiarities# that make this build quite hackish.
+        # TAU configure, despite the name, seems to be a manually
+ # written script (nothing related to autotools). As such it has
+ # a few #peculiarities# that make this build quite hackish.
options = ["-prefix=%s" % prefix,
"-iowrapper",
"-pdt=%s" % spec['pdt'].prefix]
@@ -137,3 +146,15 @@ class Tau(Package):
dest = join_path(self.prefix, d)
if os.path.isdir(src) and not os.path.exists(dest):
os.symlink(join_path(subdir, d), dest)
+
+ def setup_environment(self, spack_env, run_env):
+ pattern = join_path(self.prefix.lib, 'Makefile.*')
+ files = glob.glob(pattern)
+
+ # This function is called both at install time to set up
+ # the build environment and after install to generate the associated
+ # module file. In the former case there is no `self.prefix.lib`
+ # directory to inspect. The conditional below will set `TAU_MAKEFILE`
+ # in the latter case.
+ if files:
+ run_env.set('TAU_MAKEFILE', files[0])
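
The new setup_environment hook above globs for Makefile.* under the installed lib directory and exports TAU_MAKEFILE only when a match exists, which keeps the build-time call (when the prefix is still empty) a no-op. A standalone sketch of the same glob-and-set pattern (the path below is a placeholder):

# Sketch only: export TAU_MAKEFILE when an installed Makefile.* exists;
# do nothing while the installation prefix is still empty.
import glob
import os


def set_tau_makefile(prefix_lib, env):
    files = glob.glob(os.path.join(prefix_lib, 'Makefile.*'))
    if files:
        env['TAU_MAKEFILE'] = files[0]
    return env


print(set_tau_makefile('/tmp/no-such-prefix/lib', {}))  # -> {}
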
diff --git a/var/spack/repos/builtin/packages/tbb/package.py b/var/spack/repos/builtin/packages/tbb/package.py
index 6c3ceb1e76..33dea6625b 100644
--- a/var/spack/repos/builtin/packages/tbb/package.py
+++ b/var/spack/repos/builtin/packages/tbb/package.py
@@ -23,9 +23,9 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
import glob
+
class Tbb(Package):
"""Widely used C++ template library for task parallelism.
Intel Threading Building Blocks (Intel TBB) lets you easily write parallel
@@ -35,35 +35,44 @@ class Tbb(Package):
homepage = "http://www.threadingbuildingblocks.org/"
# Only version-specific URL's work for TBB
- version('4.4.3', '80707e277f69d9b20eeebdd7a5f5331137868ce1', url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160128oss_src_0.tgz')
+ # can also use https://github.com/01org/tbb/releases/
+ version('2017.3', '2c451a5bcf6fc31487b98b4b29651c369874277c',
+ url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb2017_20161128oss_src.tgz')
+ version('4.4.4', 'd4cee5e4ca75cab5181834877738619c56afeb71',
+ url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160413oss_src.tgz')
+ version('4.4.3', '80707e277f69d9b20eeebdd7a5f5331137868ce1',
+ url='https://www.threadingbuildingblocks.org/sites/default/files/software_releases/source/tbb44_20160128oss_src_0.tgz')
- def coerce_to_spack(self,tbb_build_subdir):
- for compiler in ["icc","gcc","clang"]:
- fs = glob.glob(join_path(tbb_build_subdir,"*.%s.inc" % compiler ))
- for f in fs:
- lines = open(f).readlines()
- of = open(f,"w")
- for l in lines:
- if l.strip().startswith("CPLUS ="):
+ def coerce_to_spack(self, tbb_build_subdir):
+ for compiler in ["icc", "gcc", "clang"]:
+ fs = glob.glob(join_path(tbb_build_subdir,
+ "*.%s.inc" % compiler))
+ for f in fs:
+ lines = open(f).readlines()
+ of = open(f, "w")
+ for l in lines:
+ if l.strip().startswith("CPLUS ="):
of.write("# coerced to spack\n")
of.write("CPLUS = $(CXX)\n")
- elif l.strip().startswith("CPLUS ="):
+ elif l.strip().startswith("CPLUS ="):
of.write("# coerced to spack\n")
of.write("CONLY = $(CC)\n")
- else:
- of.write(l);
+ else:
+ of.write(l)
def install(self, spec, prefix):
- #
- # we need to follow TBB's compiler selection logic to get the proper build + link flags
- # but we still need to use spack's compiler wrappers
+ if spec.satisfies('%gcc@6.1:') and spec.satisfies('@:4.4.3'):
+ raise InstallError('Only TBB 4.4.4 and above build with GCC 6.1!')
+
+ # We need to follow TBB's compiler selection logic to get the proper
+ # build + link flags but we still need to use spack's compiler wrappers
# to accomplish this, we do two things:
#
- # * Look at the spack spec to determine which compiler we should pass to tbb's Makefile
+ # * Look at the spack spec to determine which compiler we should pass
+ # to tbb's Makefile;
#
# * patch tbb's build system to use the compiler wrappers (CC, CXX) for
- # icc, gcc, clang
- # (see coerce_to_spack())
+ # icc, gcc, clang (see coerce_to_spack());
#
self.coerce_to_spack("build")
@@ -74,7 +83,6 @@ class Tbb(Package):
else:
tbb_compiler = "gcc"
-
mkdirp(prefix)
mkdirp(prefix.lib)
@@ -82,10 +90,10 @@ class Tbb(Package):
# tbb does not have a configure script or make install target
# we simply call make, and try to put the pieces together
#
- make("compiler=%s" %(tbb_compiler))
+ make("compiler=%s" % (tbb_compiler))
# install headers to {prefix}/include
- install_tree('include',prefix.include)
+ install_tree('include', prefix.include)
# install libs to {prefix}/lib
tbb_lib_names = ["libtbb",
@@ -94,10 +102,10 @@ class Tbb(Package):
for lib_name in tbb_lib_names:
# install release libs
- fs = glob.glob(join_path("build","*release",lib_name + ".*"))
+ fs = glob.glob(join_path("build", "*release", lib_name + ".*"))
for f in fs:
install(f, prefix.lib)
# install debug libs if they exist
- fs = glob.glob(join_path("build","*debug",lib_name + "_debug.*"))
+ fs = glob.glob(join_path("build", "*debug", lib_name + "_debug.*"))
for f in fs:
install(f, prefix.lib)
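
coerce_to_spack() above rewrites TBB's *.{icc,gcc,clang}.inc build includes so that the CPLUS and CONLY assignments point at the $(CXX) and $(CC) wrappers. A minimal standalone sketch of that line rewrite (file I/O and error handling are omitted):

# Sketch only: redirect the CPLUS/CONLY assignments in a TBB build
# include file to the generic $(CXX)/$(CC) make variables.
def coerce_inc(lines):
    out = []
    for line in lines:
        if line.strip().startswith('CPLUS ='):
            out += ['# coerced to spack\n', 'CPLUS = $(CXX)\n']
        elif line.strip().startswith('CONLY ='):
            out += ['# coerced to spack\n', 'CONLY = $(CC)\n']
        else:
            out.append(line)
    return out


print(coerce_inc(['CPLUS = icpc\n', 'CONLY = icc\n', 'CXXFLAGS =\n']))
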
diff --git a/var/spack/repos/builtin/packages/tcl/package.py b/var/spack/repos/builtin/packages/tcl/package.py
index a4d8b515bb..d9b535305d 100644
--- a/var/spack/repos/builtin/packages/tcl/package.py
+++ b/var/spack/repos/builtin/packages/tcl/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class Tcl(Package):
+
+class Tcl(AutotoolsPackage):
"""Tcl (Tool Command Language) is a very powerful but easy to
learn dynamic programming language, suitable for a very wide
range of uses, including web and desktop applications,
@@ -34,9 +35,6 @@ class Tcl(Package):
extensible."""
homepage = "http://www.tcl.tk"
- def url_for_version(self, version):
- return 'http://prdownloads.sourceforge.net/tcl/tcl%s-src.tar.gz' % version
-
version('8.6.5', '0e6426a4ca9401825fbc6ecf3d89a326')
version('8.6.4', 'd7cbb91f1ded1919370a30edd1534304')
version('8.6.3', 'db382feca91754b7f93da16dc4cdad1f')
@@ -44,8 +42,20 @@ class Tcl(Package):
depends_on('zlib')
- def install(self, spec, prefix):
- with working_dir('unix'):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
+ def url_for_version(self, version):
+ base_url = 'http://prdownloads.sourceforge.net/tcl'
+ return '{0}/tcl{1}-src.tar.gz'.format(base_url, version)
+
+ def setup_environment(self, spack_env, env):
+ # When using Tkinter from within spack provided python+tk, python
+ # will not be able to find Tcl/Tk unless TCL_LIBRARY is set.
+ env.set('TCL_LIBRARY', join_path(self.prefix.lib, 'tcl{0}'.format(
+ self.spec.version.up_to(2))))
+
+ def build_directory(self):
+ return 'unix'
+
+ @AutotoolsPackage.sanity_check('install')
+ def symlink_tclsh(self):
+ with working_dir(self.prefix.bin):
+ symlink('tclsh{0}'.format(self.version.up_to(2)), 'tclsh')
diff --git a/var/spack/repos/builtin/packages/tetgen/package.py b/var/spack/repos/builtin/packages/tetgen/package.py
index 5e87ed7fba..6e5ed79c36 100644
--- a/var/spack/repos/builtin/packages/tetgen/package.py
+++ b/var/spack/repos/builtin/packages/tetgen/package.py
@@ -22,22 +22,48 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+
from spack import *
+import glob
+
class Tetgen(Package):
- """TetGen is a program and library that can be used to generate tetrahedral
- meshes for given 3D polyhedral domains. TetGen generates exact constrained
- Delaunay tetrahedralizations, boundary conforming Delaunay meshes, and
- Voronoi paritions."""
+ """TetGen is a program and library that can be used to generate
+ tetrahedral meshes for given 3D polyhedral domains. TetGen
+ generates exact constrained Delaunay tetrahedralizations,
+    boundary conforming Delaunay meshes, and Voronoi partitions.
+ """
homepage = "http://www.tetgen.org"
- url = "http://www.tetgen.org/files/tetgen1.4.3.tar.gz"
- version('1.4.3', 'd6a4bcdde2ac804f7ec66c29dcb63c18')
+ version('1.5.0', '3b9fd9cdec121e52527b0308f7aad5c1', url='http://www.tetgen.org/1.5/src/tetgen1.5.0.tar.gz')
+ version('1.4.3', 'd6a4bcdde2ac804f7ec66c29dcb63c18', url='http://www.tetgen.org/files/tetgen1.4.3.tar.gz')
+
+ variant('debug', default=False, description='Builds the library in debug mode.')
+ variant('except', default=False, description='Replaces asserts with exceptions for better C++ compatibility.')
+
+ patch('tetgen-1.5.0-free.patch', when='@1.5.0')
+
+ def patch(self):
+ cflags = '-g -O0' if '+debug' in self.spec else '-g0 -O3'
+
+ mff = FileFilter('makefile')
+ mff.filter(r'^(C(XX)?FLAGS\s*=)(.*)$', r'\1 {0}'.format(cflags))
+
+ if '+except' in self.spec:
+ hff = FileFilter('tetgen.h')
+ hff.filter(r'(\b)(throw)(\b)(.*);', r'\1assert_throw(false);')
+ hff.filter(r'^(#define\s*tetgenH\s*)$', r'\1{0}'.format("""\n
+#include <stdexcept>
+
+inline void assert_throw(bool assertion)
+{
+ if(!assertion)
+ throw std::runtime_error("Tetgen encountered a problem (assert failed)!");
+}\n"""))
- # TODO: Make this a build dependency once build dependencies are supported
- # (see: https://github.com/LLNL/spack/pull/378).
- depends_on('cmake@2.8.7:', when='@1.5.0:')
+ sff = FileFilter(*(glob.glob('*.cxx')))
+ sff.filter(r'(\b)(assert)(\b)', r'\1assert_throw\3')
def install(self, spec, prefix):
make('tetgen', 'tetlib')
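
The +except variant above filters tetgen.h and the *.cxx sources with regular expressions so that asserts raise a C++ exception instead of aborting. A standalone sketch that applies both substitutions to one sample string using Python's re module (the recipe applies them to separate files via FileFilter):

# Sketch only: the two regex rewrites used by the +except variant,
# applied here to a single sample string.
import re

sample = 'if (bad) assert(ok); else throw "boom";'
sample = re.sub(r'(\b)(throw)(\b)(.*);', r'\1assert_throw(false);', sample)
sample = re.sub(r'(\b)(assert)(\b)', r'\1assert_throw\3', sample)
print(sample)  # -> if (bad) assert_throw(ok); else assert_throw(false);
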
diff --git a/var/spack/repos/builtin/packages/tetgen/tetgen-1.5.0-free.patch b/var/spack/repos/builtin/packages/tetgen/tetgen-1.5.0-free.patch
new file mode 100644
index 0000000000..009ccd834f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tetgen/tetgen-1.5.0-free.patch
@@ -0,0 +1,49 @@
+diff --git a/tetgen.h b/tetgen.h
+index 3196e03..2ff3138 100644
+--- a/tetgen.h
++++ b/tetgen.h
+@@ -2206,6 +2206,44 @@ public:
+ if (highordertable != NULL) {
+ delete [] highordertable;
+ }
++
++ bgm = NULL;
++
++ points = NULL;
++ dummypoint = NULL;
++
++ tetrahedrons = NULL;
++
++ subfaces = NULL;
++ subsegs = NULL;
++
++ tet2segpool = NULL;
++ tet2subpool = NULL;
++
++ flippool = NULL;
++ unflipqueue = NULL;
++
++ cavetetlist = NULL;
++ cavebdrylist = NULL;
++ caveoldtetlist = NULL;
++ cavetetvertlist = NULL;
++
++ caveshlist = NULL;
++ caveshbdlist = NULL;
++ cavesegshlist = NULL;
++ cavetetshlist = NULL;
++ cavetetseglist = NULL;
++ caveencshlist = NULL;
++ caveencseglist = NULL;
++
++ subsegstack = NULL;
++ subfacstack = NULL;
++ subvertstack = NULL;
++
++ idx2facetlist = NULL;
++ facetverticeslist = NULL;
++ segmentendpointslist = NULL;
++ highordertable = NULL;
+ }
+
+ ~tetgenmesh()
diff --git a/var/spack/repos/builtin/packages/tethex/package.py b/var/spack/repos/builtin/packages/tethex/package.py
new file mode 100644
index 0000000000..624942498e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tethex/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Tethex(Package):
+ """Tethex is designed to convert triangular (in 2D) or tetrahedral (in 3D)
+    """Tethex is designed to convert a triangular (in 2D) or tetrahedral
+    (in 3D) Gmsh mesh to a quadrilateral or hexahedral one, respectively.
+    These meshes can be used in software packages that work with hexahedra
+    only - for example, deal.II.
+
+ homepage = "https://github.com/martemyev/tethex"
+ url = "https://github.com/martemyev/tethex/archive/v0.0.7.tar.gz"
+
+ version('0.0.7', '6c9e4a18a6637deb4400c6d77ec03184')
+ version('develop', git='https://github.com/martemyev/tethex.git')
+
+ depends_on('cmake', type='build')
+
+ def install(self, spec, prefix):
+ cmake('.')
+ make()
+
+ # install by hand
+ mkdirp(prefix.bin)
+ install('tethex', prefix.bin)
diff --git a/var/spack/repos/builtin/packages/texinfo/package.py b/var/spack/repos/builtin/packages/texinfo/package.py
index 5c6fef0db6..e4fbc37235 100644
--- a/var/spack/repos/builtin/packages/texinfo/package.py
+++ b/var/spack/repos/builtin/packages/texinfo/package.py
@@ -26,21 +26,18 @@
from spack import *
-class Texinfo(Package):
- """
- Texinfo is the official documentation format of the GNU project. It was invented by Richard Stallman and Bob
- Chassell many years ago, loosely based on Brian Reid's Scribe and other formatting languages of the time. It is
- used by many non-GNU projects as well.FIXME: put a proper description of your package here.
- """
+class Texinfo(AutotoolsPackage):
+ """Texinfo is the official documentation format of the GNU project.
+
+ It was invented by Richard Stallman and Bob Chassell many years ago,
+ loosely based on Brian Reid's Scribe and other formatting languages
+ of the time. It is used by many non-GNU projects as well."""
+
homepage = "https://www.gnu.org/software/texinfo/"
url = "http://ftp.gnu.org/gnu/texinfo/texinfo-6.0.tar.gz"
+ version('6.3', '9b08daca9bf8eccae9b0f884aba41f9e')
version('6.0', 'e1a2ef5dce5018b53f0f6eed45b247a7')
version('5.2', '1b8f98b80a8e6c50422125e07522e8db')
version('5.1', '54e250014fe698fb4832016158747c03')
version('5.0', '918432285abe6fe96c98355594c5656a')
-
- def install(self, spec, prefix):
- configure('--prefix=%s' % prefix)
- make()
- make("install")
diff --git a/var/spack/repos/builtin/packages/texlive/package.py b/var/spack/repos/builtin/packages/texlive/package.py
new file mode 100644
index 0000000000..a960e5b68d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/texlive/package.py
@@ -0,0 +1,67 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Texlive(Package):
+ """TeX Live is a free software distribution for the TeX typesetting
+ system"""
+
+ homepage = "http://www.tug.org/texlive"
+
+ # Pull from specific site because the texlive mirrors do not all
+ # update in synchrony.
+ #
+ # BEWARE: TexLive updates their installs frequently (probably why
+ # they call it *Live*...). There is no good way to provide a
+ # repeatable install of the package. We try to keep up with the
+ # digest values, but don't be surprised if this package is
+ # briefly unbuildable.
+ #
+ version('live', '01461ec2cc49fe0b14812eb67abbea46',
+ url="http://ctan.math.utah.edu/ctan/tex-archive/systems/texlive/tlnet/install-tl-unx.tar.gz")
+
+ # There does not seem to be a complete list of schemes.
+ # Examples include:
+ # full scheme (everything)
+ # medium scheme (small + more packages and languages)
+ # small scheme (basic + xetex, metapost, a few languages)
+ # basic scheme (plain and latex)
+ # minimal scheme (plain only)
+ # See:
+ # https://www.tug.org/texlive/doc/texlive-en/texlive-en.html#x1-25025r6
+ variant('scheme', default="small",
+ description='Package subset to install (e.g. full, small, basic)')
+
+ depends_on('perl', type='build')
+
+ def install(self, spec, prefix):
+ env = os.environ
+ env['TEXLIVE_INSTALL_PREFIX'] = prefix
+ perl = which('perl')
+ scheme = spec.variants['scheme'].value
+ perl('./install-tl', '-scheme', scheme,
+ '-portable', '-profile', '/dev/null')
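install() above simply forwards the chosen scheme variant to TeX Live's own installer. A tiny runnable sketch of the command it assembles (the scheme value is only an example):

    scheme = 'small'   # value of the 'scheme' variant: full, small, basic, ...
    cmd = ['perl', './install-tl', '-scheme', scheme,
           '-portable', '-profile', '/dev/null']
    print(' '.join(cmd))
    # perl ./install-tl -scheme small -portable -profile /dev/null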
diff --git a/var/spack/repos/builtin/packages/the-platinum-searcher/package.py b/var/spack/repos/builtin/packages/the-platinum-searcher/package.py
new file mode 100644
index 0000000000..eeddf194ea
--- /dev/null
+++ b/var/spack/repos/builtin/packages/the-platinum-searcher/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+import shutil
+
+
+class ThePlatinumSearcher(Package):
+ """Fast parallel recursive grep alternative"""
+ homepage = "https://github.com/monochromegane/the_platinum_searcher"
+ url = "https://github.com/monochromegane/the_platinum_searcher"
+
+ package = 'github.com/monochromegane/the_platinum_searcher/...'
+
+ version('head', go=package)
+
+ extends("go", deptypes='build')
+
+ def install(self, spec, prefix):
+ env = os.environ
+ env['GOPATH'] = self.stage.source_path + ':' + env['GOPATH']
+ go('install', self.package, env=env)
+ shutil.copytree('bin', os.path.join(prefix, 'bin'))
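The only subtle part of install() above is the GOPATH handling: the staged sources are prepended so that go install resolves the package from the stage before anything already on GOPATH. A minimal sketch of that environment tweak (the stage path is invented for illustration, and a missing GOPATH is tolerated here, unlike in the package code):

    import os

    stage = '/tmp/spack-stage/the-platinum-searcher-head'   # hypothetical stage dir
    env = dict(os.environ)
    env['GOPATH'] = stage + ':' + env.get('GOPATH', '')
    print(env['GOPATH'])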
diff --git a/var/spack/repos/builtin/packages/the_silver_searcher/package.py b/var/spack/repos/builtin/packages/the-silver-searcher/package.py
index e289b6d783..c98e964efa 100644
--- a/var/spack/repos/builtin/packages/the_silver_searcher/package.py
+++ b/var/spack/repos/builtin/packages/the-silver-searcher/package.py
@@ -24,16 +24,18 @@
##############################################################################
from spack import *
+
class TheSilverSearcher(Package):
"""Fast recursive grep alternative"""
homepage = "http://geoff.greer.fm/ag/"
- url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.30.0.tar.gz"
+ url = "http://geoff.greer.fm/ag/releases/the_silver_searcher-0.32.0.tar.gz"
+ version('0.32.0', '3fdfd5836924246073d5344257a06823')
version('0.30.0', '95e2e7859fab1156c835aff7413481db')
depends_on('pcre')
depends_on('xz')
- depends_on('pkg-config')
+ depends_on('pkg-config', type='build')
def install(self, spec, prefix):
configure("--prefix=%s" % prefix)
diff --git a/var/spack/repos/builtin/packages/the_platinum_searcher/package.py b/var/spack/repos/builtin/packages/the_platinum_searcher/package.py
deleted file mode 100644
index 9c9a66cdef..0000000000
--- a/var/spack/repos/builtin/packages/the_platinum_searcher/package.py
+++ /dev/null
@@ -1,21 +0,0 @@
-from spack import *
-import os
-import shutil
-
-
-class ThePlatinumSearcher(Package):
- """Fast parallel recursive grep alternative"""
- homepage = "https://github.com/monochromegane/the_platinum_searcher"
- url = "https://github.com/monochromegane/the_platinum_searcher"
-
- package = 'github.com/monochromegane/the_platinum_searcher/...'
-
- version('head', go=package)
-
- extends("go")
-
- def install(self, spec, prefix):
- env = os.environ
- env['GOPATH'] = self.stage.source_path + ':' + env['GOPATH']
- go('install', self.package, env=env)
- shutil.copytree('bin', os.path.join(prefix, 'bin'))
diff --git a/var/spack/repos/builtin/packages/thrift/package.py b/var/spack/repos/builtin/packages/thrift/package.py
index 6d834c6321..755f7a80b9 100644
--- a/var/spack/repos/builtin/packages/thrift/package.py
+++ b/var/spack/repos/builtin/packages/thrift/package.py
@@ -24,12 +24,16 @@
##############################################################################
from spack import *
+
class Thrift(Package):
- """The Apache Thrift software framework, for scalable cross-language services
- development, combines a software stack with a code generation engine to build
- services that work efficiently and seamlessly between C++, Java, Python, PHP,
- Ruby, Erlang, Perl, Haskell, C#, Cocoa, JavaScript, Node.js, Smalltalk, OCaml
- and Delphi and other languages."""
+ """Software framework for scalable cross-language services development.
+
+ Thrift combines a software stack with a code generation engine to
+ build services that work efficiently and seamlessly between C++,
+ Java, Python, PHP, Ruby, Erlang, Perl, Haskell, C#, Cocoa,
+ JavaScript, Node.js, Smalltalk, OCaml, Delphi, and other languages.
+
+ """
homepage = "http://thrift.apache.org"
url = "http://apache.mirrors.ionfish.org/thrift/0.9.2/thrift-0.9.2.tar.gz"
@@ -37,16 +41,18 @@ class Thrift(Package):
version('0.9.2', '89f63cc4d0100912f4a1f8a9dee63678')
# Currently only support for c-family and python
- variant('c', default=True, description="Build support for C-family languages")
- variant('python', default=True, description="Build support for python")
+ variant('c', default=True,
+ description="Build support for C-family languages")
+ variant('python', default=True,
+ description="Build support for python")
depends_on('jdk')
- depends_on('autoconf')
- depends_on('automake')
- depends_on('libtool')
+ depends_on('autoconf', type='build')
+ depends_on('automake', type='build')
+ depends_on('libtool', type='build')
depends_on('boost@1.53:')
- depends_on('bison')
- depends_on('flex')
+ depends_on('bison', type='build')
+ depends_on('flex', type='build')
depends_on('openssl')
# Variant dependencies
@@ -66,7 +72,8 @@ class Thrift(Package):
options.append('--enable-tests=no')
options.append('--with-c=%s' % ('yes' if '+c' in spec else 'no'))
- options.append('--with-python=%s' % ('yes' if '+python' in spec else 'no'))
+ options.append('--with-python=%s' %
+ ('yes' if '+python' in spec else 'no'))
options.append('--with-java=%s' % ('yes' if '+java' in spec else 'no'))
options.append('--with-go=%s' % ('yes' if '+go' in spec else 'no'))
options.append('--with-lua=%s' % ('yes' if '+lua' in spec else 'no'))
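Every configure option above follows the same variant-to-flag pattern: test '+name' against the spec and emit --with-name=yes/no. A self-contained illustration, with the spec faked as a plain string rather than a real Spack Spec object:

    spec = '+c~python'   # pretend concretized spec: C support on, python off
    options = []
    for feature in ('c', 'python'):
        enabled = ('+' + feature) in spec
        options.append('--with-%s=%s' % (feature, 'yes' if enabled else 'no'))
    print(options)   # ['--with-c=yes', '--with-python=no']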
diff --git a/var/spack/repos/builtin/packages/tinyxml/CMakeLists.txt b/var/spack/repos/builtin/packages/tinyxml/CMakeLists.txt
new file mode 100644
index 0000000000..93f124d716
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tinyxml/CMakeLists.txt
@@ -0,0 +1,17 @@
+cmake_minimum_required(VERSION 2.6)
+
+project(TinyXml)
+OPTION(TIXML_USE_STL "Use STL with TIXML" ON)
+if(TIXML_USE_STL)
+ add_definitions(-DTIXML_USE_STL)
+endif(TIXML_USE_STL)
+add_library(
+ tinyxml
+ tinyxml.cpp
+ tinystr.cpp
+ tinyxmlerror.cpp
+ tinyxmlparser.cpp
+)
+
+INSTALL( FILES tinyxml.h tinystr.h DESTINATION include )
+INSTALL( TARGETS tinyxml ARCHIVE DESTINATION lib )
diff --git a/var/spack/repos/builtin/packages/tinyxml/package.py b/var/spack/repos/builtin/packages/tinyxml/package.py
new file mode 100644
index 0000000000..1789d9022e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tinyxml/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os.path
+
+
+class Tinyxml(CMakePackage):
+ """Simple, small, efficient, C++ XML parser"""
+
+ homepage = "http://grinninglizard.com/tinyxml/"
+ url = "https://sourceforge.net/projects/tinyxml/files/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz"
+
+ version('2.6.2', 'cba3f50dd657cb1434674a03b21394df9913d764')
+
+ def patch(self):
+ copyfile(join_path(os.path.dirname(__file__),
+ "CMakeLists.txt"), "CMakeLists.txt")
diff --git a/var/spack/repos/builtin/packages/tinyxml2/package.py b/var/spack/repos/builtin/packages/tinyxml2/package.py
new file mode 100644
index 0000000000..d36bb5fa9b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tinyxml2/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Tinyxml2(CMakePackage):
+ """Simple, small, efficient, C++ XML parser"""
+
+ homepage = "http://grinninglizard.com/tinyxml2/"
+ url = "https://github.com/leethomason/tinyxml2/archive/3.0.0.tar.gz"
+
+ version('4.0.1', '08570d385788f6b02f50f5fd9df32a9d4f8482cc')
+ version('4.0.0', '7a6f0858d75f360922f3ca272f7067e8cdf00489')
+ version('3.0.0', '07acaae49f7dd3dab790da4fe72d0c7ef0d116d1')
+ version('2.2.0', '7869aa08241ce16f93ba3732c1cde155b1f2b6a0')
+ version('2.1.0', '70ef3221bdc190fd8fc50cdd4a6ef440f44b74dc')
+ version('2.0.2', 'c78a4de58540e2a35f4775fd3e577299ebd15117')
diff --git a/var/spack/repos/builtin/packages/tk/package.py b/var/spack/repos/builtin/packages/tk/package.py
index 330e1c77f5..071db04e63 100644
--- a/var/spack/repos/builtin/packages/tk/package.py
+++ b/var/spack/repos/builtin/packages/tk/package.py
@@ -24,7 +24,8 @@
##############################################################################
from spack import *
-class Tk(Package):
+
+class Tk(AutotoolsPackage):
"""Tk is a graphical user interface toolkit that takes developing
desktop applications to a higher level than conventional
approaches. Tk is the standard GUI not only for Tcl, but for
@@ -33,16 +34,27 @@ class Tk(Package):
and more."""
homepage = "http://www.tcl.tk"
- def url_for_version(self, version):
- return "http://prdownloads.sourceforge.net/tcl/tk%s-src.tar.gz" % version
-
+ version('8.6.5', '11dbbd425c3e0201f20d6a51482ce6c4')
version('8.6.3', '85ca4dbf4dcc19777fd456f6ee5d0221')
+ variant('X', default=False, description='Enable X11 support')
+
depends_on("tcl")
+ depends_on("libx11", when='+X')
+
+ def url_for_version(self, version):
+ base_url = "http://prdownloads.sourceforge.net/tcl"
+ return "{0}/tk{1}-src.tar.gz".format(base_url, version)
+
+ def setup_environment(self, spack_env, run_env):
+ # When using Tkinter from within spack provided python+tk, python
+ # will not be able to find Tcl/Tk unless TK_LIBRARY is set.
+ run_env.set('TK_LIBRARY', join_path(self.prefix.lib, 'tk{0}'.format(
+ self.spec.version.up_to(2))))
+
+ def build_directory(self):
+ return 'unix'
- def install(self, spec, prefix):
- with working_dir('unix'):
- configure("--prefix=%s" % prefix,
- "--with-tcl=%s" % spec['tcl'].prefix.lib)
- make()
- make("install")
+ def configure_args(self):
+ spec = self.spec
+ return ['--with-tcl={0}'.format(spec['tcl'].prefix.lib)]
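url_for_version above is pure string formatting; a runnable illustration of the download URL it produces, with the version handled as a plain string:

    base_url = "http://prdownloads.sourceforge.net/tcl"
    version = "8.6.5"
    print("{0}/tk{1}-src.tar.gz".format(base_url, version))
    # http://prdownloads.sourceforge.net/tcl/tk8.6.5-src.tar.gz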
diff --git a/var/spack/repos/builtin/packages/tmux/package.py b/var/spack/repos/builtin/packages/tmux/package.py
index c46425c0d3..89c9751486 100644
--- a/var/spack/repos/builtin/packages/tmux/package.py
+++ b/var/spack/repos/builtin/packages/tmux/package.py
@@ -24,26 +24,34 @@
##############################################################################
from spack import *
+
class Tmux(Package):
"""tmux is a terminal multiplexer. What is a terminal multiplexer? It lets
- you switch easily between several programs in one terminal, detach them (they
- keep running in the background) and reattach them to a different terminal. And
- do a lot more.
+ you switch easily between several programs in one terminal, detach them
+ (they keep running in the background) and reattach them to a different
+ terminal. And do a lot more.
"""
homepage = "http://tmux.github.io"
- url = "https://github.com/tmux/tmux/releases/download/2.1/tmux-2.1.tar.gz"
+ url = "https://github.com/tmux/tmux/releases/download/2.2/tmux-2.2.tar.gz"
- version('1.9a', 'b07601711f96f1d260b390513b509a2d')
+ version('2.3', 'fcfd1611d705d8b31df3c26ebc93bd3e')
+ version('2.2', 'bd95ee7205e489c62c616bb7af040099')
version('2.1', '74a2855695bccb51b6e301383ad4818c')
+ version('1.9a', 'b07601711f96f1d260b390513b509a2d')
depends_on('libevent')
depends_on('ncurses')
def install(self, spec, prefix):
+ pkg_config_path = ':'.join([
+ spec['libevent'].prefix,
+ spec['ncurses'].prefix
+ ])
+
configure(
"--prefix=%s" % prefix,
- "PKG_CONFIG_PATH=%s:%s" % (spec['libevent'].prefix, spec['ncurses'].prefix))
+ "PKG_CONFIG_PATH=%s" % pkg_config_path)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/tmuxinator/package.py b/var/spack/repos/builtin/packages/tmuxinator/package.py
index b9c92ea4db..66da4006f2 100644
--- a/var/spack/repos/builtin/packages/tmuxinator/package.py
+++ b/var/spack/repos/builtin/packages/tmuxinator/package.py
@@ -24,17 +24,18 @@
##############################################################################
from spack import *
+
class Tmuxinator(Package):
"""A session configuration creator and manager for tmux"""
homepage = "https://github.com/tmuxinator/tmuxinator"
url = "https://github.com/tmuxinator/tmuxinator"
version('0.6.11',
- git='https://github.com/tmuxinator/tmuxinator',
- tag='v0.6.11')
+ git='https://github.com/tmuxinator/tmuxinator',
+ tag='v0.6.11')
extends('ruby')
def install(self, spec, prefix):
- gem('build', 'tmuxinator.gemspec')
- gem('install', 'tmuxinator-{0}.gem'.format(self.version))
+ gem('build', 'tmuxinator.gemspec')
+ gem('install', 'tmuxinator-{0}.gem'.format(self.version))
diff --git a/var/spack/repos/builtin/packages/transset/package.py b/var/spack/repos/builtin/packages/transset/package.py
new file mode 100644
index 0000000000..0f60738741
--- /dev/null
+++ b/var/spack/repos/builtin/packages/transset/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Transset(Package):
+ """transset is an utility for setting opacity property."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/transset"
+ url = "https://www.x.org/archive/individual/app/transset-1.0.1.tar.gz"
+
+ version('1.0.1', '4bbee6f6ea6fbd403280b4bb311db6dc')
+
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/trapproto/package.py b/var/spack/repos/builtin/packages/trapproto/package.py
new file mode 100644
index 0000000000..eebab74410
--- /dev/null
+++ b/var/spack/repos/builtin/packages/trapproto/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Trapproto(Package):
+ """X.org TrapProto protocol headers."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/trapproto"
+ url = "https://www.x.org/archive/individual/proto/trapproto-3.4.3.tar.gz"
+
+ version('3.4.3', '1344759ae8d7d923e64f5eec078a679b')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/tree/package.py b/var/spack/repos/builtin/packages/tree/package.py
new file mode 100644
index 0000000000..795f8c997e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/tree/package.py
@@ -0,0 +1,63 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import sys
+
+
+class Tree(Package):
+ """Tree is a recursive directory listing command that produces a depth
+ indented listing of files, which is colorized a la dircolors if
+ the LS_COLORS environment variable is set and output is to a
+ tty. Tree has been ported and reported to work under the
+ following operating systems: Linux, FreeBSD, OS X, Solaris,
+ HP/UX, Cygwin, HP Nonstop and OS/2."""
+
+ homepage = "http://mama.indstate.edu/users/ice/tree/"
+ url = "http://mama.indstate.edu/users/ice/tree/src/tree-1.7.0.tgz"
+
+ version('1.7.0', 'abe3e03e469c542d8e157cdd93f4d8a6')
+
+ def install(self, spec, prefix):
+ objs = [
+ 'tree.o',
+ 'unix.o',
+ 'html.o',
+ 'xml.o',
+ 'json.o',
+ 'hash.o',
+ 'color.o'
+ ]
+ if (sys.platform == 'darwin'):
+ objs.append('strverscmp.o')
+
+ args = [
+ 'prefix=%s' % prefix,
+ 'CC=%s' % spack_cc,
+ 'CFLAGS=',
+ 'OBJS=%s' % ' '.join(objs),
+ 'install'
+ ]
+
+ make(*args)
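tree has no configure step, so install() above overrides the Makefile variables directly on the make command line. A hypothetical preview of the resulting invocation (prefix and compiler are placeholders):

    objs = ['tree.o', 'unix.o', 'html.o', 'xml.o', 'json.o', 'hash.o', 'color.o']
    args = ['prefix=/opt/spack/tree-1.7.0', 'CC=cc', 'CFLAGS=',
            'OBJS=%s' % ' '.join(objs), 'install']
    print('make ' + ' '.join(args))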
diff --git a/var/spack/repos/builtin/packages/triangle/package.py b/var/spack/repos/builtin/packages/triangle/package.py
index bc8b0ec639..f4ee9ca1c9 100644
--- a/var/spack/repos/builtin/packages/triangle/package.py
+++ b/var/spack/repos/builtin/packages/triangle/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Triangle(Package):
"""Triangle is a two-dimensional mesh generator and Delaunay
triangulator. Triangle generates exact Delaunay triangulations,
diff --git a/var/spack/repos/builtin/packages/trilinos/package.py b/var/spack/repos/builtin/packages/trilinos/package.py
index 1eaec86405..4b3412ce7a 100644
--- a/var/spack/repos/builtin/packages/trilinos/package.py
+++ b/var/spack/repos/builtin/packages/trilinos/package.py
@@ -23,137 +23,235 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os, sys, glob
+import os
+import sys
-# Trilinos is complicated to build, as an inspiration a couple of links to other repositories which build it:
+# Trilinos is complicated to build, as an inspiration a couple of links to
+# other repositories which build it:
# https://github.com/hpcugent/easybuild-easyblocks/blob/master/easybuild/easyblocks/t/trilinos.py#L111
# https://github.com/koecher/candi/blob/master/deal.II-toolchain/packages/trilinos.package
# https://gitlab.com/configurations/cluster-config/blob/master/trilinos.sh
-# https://github.com/Homebrew/homebrew-science/blob/master/trilinos.rb
-# and some relevant documentation/examples:
+# https://github.com/Homebrew/homebrew-science/blob/master/trilinos.rb
+# and some relevant documentation/examples:
# https://github.com/trilinos/Trilinos/issues/175
-class Trilinos(Package):
- """The Trilinos Project is an effort to develop algorithms and enabling technologies within an object-oriented
- software framework for the solution of large-scale, complex multi-physics engineering and scientific problems.
+
+
+class Trilinos(CMakePackage):
+ """The Trilinos Project is an effort to develop algorithms and enabling
+ technologies within an object-oriented software framework for the solution
+ of large-scale, complex multi-physics engineering and scientific problems.
A unique design feature of Trilinos is its focus on packages.
"""
homepage = "https://trilinos.org/"
- url = "http://trilinos.csbsju.edu/download/files/trilinos-12.2.1-Source.tar.gz"
+ base_url = "https://github.com/trilinos/Trilinos/archive"
+
+ version('develop',
+ git='https://github.com/trilinos/Trilinos.git', tag='develop')
+ version('master',
+ git='https://github.com/trilinos/Trilinos.git', tag='master')
+ version('12.10.1', '40f28628b63310f9bd17c26d9ebe32b1')
+ version('12.8.1', '01c0026f1e2050842857db941060ecd5')
+ version('12.6.4', 'c2ea7b5aa0d10bcabdb9b9a6e3bac3ea')
+ version('12.6.3', '8de5cc00981a0ca0defea6199b2fe4c1')
+ version('12.6.2', 'dc7f9924872778798149ecadd81605a5')
+ version('12.6.1', '8aecea78546e7558f63ecc9a3b2949da')
+ version('12.4.2', '4c25a757d86bde3531090bd900a2cea8')
+ version('12.2.1', '85d011f7f99a776a9c6c2625e8cb721c')
+ version('12.0.1', 'bcb3fdefd14d05dd6aa65ba4c5b9aa0e')
+ version('11.14.3', 'dea62e57ebe51a886bee0b10a2176969')
+ version('11.14.2', 'e7c3cdbbfe3279a8a68838b873ad6d51')
+ version('11.14.1', 'b7760b142eef66c79ed13de7c9560f81')
- version('12.6.1', 'adcf2d3aab74cdda98f88fee19cd1442604199b0515ee3da4d80cbe8f37d00e4')
- version('12.4.2', '7c830f7f0f68b8ad324690603baf404e')
- version('12.2.1', '6161926ea247863c690e927687f83be9')
- version('12.0.1', 'bd99741d047471e127b8296b2ec08017')
- version('11.14.3', '2f4f83f8333e4233c57d0f01c4b57426')
- version('11.14.2', 'a43590cf896c677890d75bfe75bc6254')
- version('11.14.1', '40febc57f76668be8b6a77b7607bb67f')
+ def url_for_version(self, version):
+ return '%s/trilinos-release-%s.tar.gz' % \
+ (Trilinos.base_url, version.dashed)
- variant('metis', default=True, description='Compile with METIS and ParMETIS')
- variant('mumps', default=True, description='Compile with support for MUMPS solvers')
- variant('superlu-dist', default=True, description='Compile with SuperluDist solvers')
- variant('hypre', default=True, description='Compile with Hypre preconditioner')
+ variant('xsdkflags', default=False,
+ description='Compile using the default xSDK configuration')
+ variant('metis', default=True,
+ description='Compile with METIS and ParMETIS')
+ variant('mumps', default=True,
+ description='Compile with support for MUMPS solvers')
+ variant('superlu-dist', default=True,
+ description='Compile with SuperluDist solvers')
+ variant('superlu', default=False,
+ description='Compile with SuperLU solvers')
+ variant('hypre', default=True,
+ description='Compile with Hypre preconditioner')
variant('hdf5', default=True, description='Compile with HDF5')
- variant('suite-sparse', default=True, description='Compile with SuiteSparse solvers')
- # not everyone has py-numpy activated, keep it disabled by default to avoid configure errors
+ variant('suite-sparse', default=True,
+ description='Compile with SuiteSparse solvers')
+ # not everyone has py-numpy activated, keep it disabled by default to avoid
+ # configure errors
variant('python', default=False, description='Build python wrappers')
- variant('shared', default=True, description='Enables the build of shared libraries')
- variant('debug', default=False, description='Builds a debug version of the libraries')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
+ variant('boost', default=True, description='Compile with Boost')
# Everything should be compiled with -fpic
depends_on('blas')
depends_on('lapack')
- depends_on('boost')
+ depends_on('boost', when='+boost')
depends_on('matio')
depends_on('glm')
- depends_on('swig')
- depends_on('metis@5:',when='+metis')
- depends_on('suite-sparse',when='+suite-sparse')
+ depends_on('metis@5:', when='+metis')
+ depends_on('suite-sparse', when='+suite-sparse')
# MPI related dependencies
depends_on('mpi')
depends_on('netcdf+mpi')
- depends_on('parmetis',when='+metis')
- # Trilinos' Tribits config system is limited which makes it
- # very tricky to link Amesos with static MUMPS, see
+ depends_on('parmetis', when='+metis')
+ # Trilinos' Tribits config system is limited which makes it very tricky to
+ # link Amesos with static MUMPS, see
# https://trilinos.org/docs/dev/packages/amesos2/doc/html/classAmesos2_1_1MUMPS.html
- # One could work it out by getting linking flags from mpif90 --showme:link (or alike)
- # and adding results to -DTrilinos_EXTRA_LINK_FLAGS
- # together with Blas and Lapack and ScaLAPACK and Blacs and -lgfortran and
- # it may work at the end. But let's avoid all this by simply using shared libs
- depends_on('mumps@5.0:+mpi+shared',when='+mumps')
- depends_on('scalapack',when='+mumps')
- depends_on('superlu-dist',when='+superlu-dist')
- depends_on('hypre~internal-superlu',when='+hypre')
- depends_on('hdf5+mpi',when='+hdf5')
+ # One could work it out by getting linking flags from mpif90 --showme:link
+ # (or alike) and adding results to -DTrilinos_EXTRA_LINK_FLAGS together
+ # with Blas and Lapack and ScaLAPACK and Blacs and -lgfortran and it may
+ # work at the end. But let's avoid all this by simply using shared libs
+ depends_on('mumps@5.0:+mpi+shared', when='+mumps')
+ depends_on('scalapack', when='+mumps')
+ depends_on('superlu-dist@:4.3', when='@:12.6.1+superlu-dist')
+ depends_on('superlu-dist', when='@12.6.2:+superlu-dist')
+ depends_on('superlu+fpic@4.3', when='+superlu')
+ depends_on('hypre~internal-superlu', when='+hypre')
+ depends_on('hdf5+mpi', when='+hdf5')
+ depends_on('python', when='+python')
+ depends_on('py-numpy', when='+python')
+ depends_on('swig', when='+python')
- depends_on('python',when='+python')
-
- patch('umfpack_from_suitesparse.patch')
+ patch('umfpack_from_suitesparse.patch', when='@:12.8.1')
# check that the combination of variants makes sense
def variants_check(self):
if '+superlu-dist' in self.spec and self.spec.satisfies('@:11.4.3'):
- # For Trilinos v11 we need to force SuperLUDist=OFF,
- # since only the deprecated SuperLUDist v3.3 together with an Amesos patch
- # is working.
- raise RuntimeError('The superlu-dist variant can only be used with Trilinos @12.0.1:')
+ # For Trilinos v11 we need to force SuperLUDist=OFF, since only the
+ # deprecated SuperLUDist v3.3 together with an Amesos patch is
+ # working.
+ raise RuntimeError('The superlu-dist variant can only be used' +
+ ' with Trilinos @12.0.1:')
+ if '+superlu-dist' in self.spec and '+superlu' in self.spec:
+ # Only choose one type of superlu
+ raise RuntimeError('The superlu-dist and superlu variants' +
+ ' cannot be used together')
- def install(self, spec, prefix):
+ def cmake_args(self):
+ spec = self.spec
self.variants_check()
cxx_flags = []
options = []
- options.extend(std_cmake_args)
mpi_bin = spec['mpi'].prefix.bin
- options.extend(['-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON',
- '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON',
- '-DTrilinos_VERBOSE_CONFIGURE:BOOL=OFF',
- '-DTrilinos_ENABLE_TESTS:BOOL=OFF',
- '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF',
- '-DCMAKE_BUILD_TYPE:STRING=%s' % ('DEBUG' if '+debug' in spec else 'RELEASE'),
- '-DBUILD_SHARED_LIBS:BOOL=%s' % ('ON' if '+shared' in spec else 'OFF'),
- '-DTPL_ENABLE_MPI:BOOL=ON',
- '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix,
- '-DTPL_ENABLE_BLAS=ON',
- '-DBLAS_LIBRARY_NAMES=blas', # FIXME: don't hardcode names
- '-DBLAS_LIBRARY_DIRS=%s' % spec['blas'].prefix.lib,
- '-DTPL_ENABLE_LAPACK=ON',
- '-DLAPACK_LIBRARY_NAMES=lapack',
- '-DLAPACK_LIBRARY_DIRS=%s' % spec['lapack'].prefix,
- '-DTPL_ENABLE_Boost:BOOL=ON',
- '-DBoost_INCLUDE_DIRS:PATH=%s' % spec['boost'].prefix.include,
- '-DBoost_LIBRARY_DIRS:PATH=%s' % spec['boost'].prefix.lib,
- '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON',
- '-DTrilinos_ENABLE_CXX11:BOOL=ON',
- '-DTPL_ENABLE_Netcdf:BOOL=ON',
- '-DTPL_ENABLE_HYPRE:BOOL=%s' % ('ON' if '+hypre' in spec else 'OFF'),
- '-DTPL_ENABLE_HDF5:BOOL=%s' % ('ON' if '+hdf5' in spec else 'OFF'),
- ])
+ # Note: -DXYZ_LIBRARY_NAMES= needs semicolon separated list of names
+ blas = spec['blas'].blas_libs
+ lapack = spec['lapack'].lapack_libs
+ options.extend([
+ '-DTrilinos_ENABLE_ALL_PACKAGES:BOOL=ON',
+ '-DTrilinos_ENABLE_ALL_OPTIONAL_PACKAGES:BOOL=ON',
+ '-DTrilinos_VERBOSE_CONFIGURE:BOOL=OFF',
+ '-DTrilinos_ENABLE_TESTS:BOOL=OFF',
+ '-DTrilinos_ENABLE_EXAMPLES:BOOL=OFF',
+ '-DCMAKE_BUILD_TYPE:STRING=%s' % (
+ 'DEBUG' if '+debug' in spec else 'RELEASE'),
+ '-DBUILD_SHARED_LIBS:BOOL=%s' % (
+ 'ON' if '+shared' in spec else 'OFF'),
+ '-DTPL_ENABLE_MPI:BOOL=ON',
+ '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix,
+ '-DTPL_ENABLE_BLAS=ON',
+ '-DBLAS_LIBRARY_NAMES=%s' % ';'.join(blas.names),
+ '-DBLAS_LIBRARY_DIRS=%s' % ';'.join(blas.directories),
+ '-DTPL_ENABLE_LAPACK=ON',
+ '-DLAPACK_LIBRARY_NAMES=%s' % ';'.join(lapack.names),
+ '-DLAPACK_LIBRARY_DIRS=%s' % ';'.join(lapack.directories),
+ '-DTrilinos_ENABLE_EXPLICIT_INSTANTIATION:BOOL=ON',
+ '-DTrilinos_ENABLE_CXX11:BOOL=ON',
+ '-DTPL_ENABLE_Netcdf:BOOL=ON',
+ '-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % self.prefix
+ ])
- # Fortran lib
- libgfortran = os.path.dirname (os.popen('%s --print-file-name libgfortran.a' % join_path(mpi_bin,'mpif90') ).read())
+ # Force Trilinos to use the MPI wrappers instead of raw compilers
+ # this is needed on Apple systems that require full resolution of
+ # all symbols when linking shared libraries
options.extend([
- '-DTrilinos_EXTRA_LINK_FLAGS:STRING=-L%s/ -lgfortran' % libgfortran,
- '-DTrilinos_ENABLE_Fortran=ON'
+ '-DCMAKE_C_COMPILER=%s' % spec['mpi'].mpicc,
+ '-DCMAKE_CXX_COMPILER=%s' % spec['mpi'].mpicxx,
+ '-DCMAKE_Fortran_COMPILER=%s' % spec['mpi'].mpifc
])
+ if '+hypre' in spec:
+ options.extend([
+ '-DTPL_ENABLE_HYPRE:BOOL=ON',
+ '-DHYPRE_INCLUDE_DIRS:PATH=%s' % spec['hypre'].prefix.include,
+ '-DHYPRE_LIBRARY_DIRS:PATH=%s' % spec['hypre'].prefix.lib
+ ])
+
+ if spec.satisfies('%intel') and spec.satisfies('@12.6.2'):
+ # Panzer uses some std::chrono that is not recognized by Intel
+ # Don't know which (maybe all) Trilinos versions this applies to
+ # Don't know which (maybe all) Intel versions this applies to
+ options.extend([
+ '-DTrilinos_ENABLE_Panzer:BOOL=OFF'
+ ])
+
+ if '+xsdkflags' in spec:
+ options.extend(['-DUSE_XSDK_DEFAULTS=YES'])
+ if '+hdf5' in spec:
+ options.extend([
+ '-DTPL_ENABLE_HDF5:BOOL=ON',
+ '-DHDF5_INCLUDE_DIRS:PATH=%s' % spec['hdf5'].prefix.include,
+ '-DHDF5_LIBRARY_DIRS:PATH=%s' % spec['hdf5'].prefix.lib
+ ])
+ else:
+ options.extend(['-DTPL_ENABLE_HDF5:BOOL=OFF'])
+
+ if '+boost' in spec:
+ options.extend([
+ '-DTPL_ENABLE_Boost:BOOL=ON',
+ '-DBoost_INCLUDE_DIRS:PATH=%s' % spec['boost'].prefix.include,
+ '-DBoost_LIBRARY_DIRS:PATH=%s' % spec['boost'].prefix.lib
+ ])
+ else:
+ options.extend(['-DTPL_ENABLE_Boost:BOOL=OFF'])
- # for build-debug only:
- #options.extend([
- # '-DCMAKE_VERBOSE_MAKEFILE:BOOL=TRUE'
- #])
+
+ # Fortran lib
+ if spec.satisfies('%gcc') or spec.satisfies('%clang'):
+ libgfortran = os.path.dirname(os.popen(
+ '%s --print-file-name libgfortran.a' %
+ join_path(mpi_bin, 'mpif90')).read())
+ options.extend([
+ '-DTrilinos_EXTRA_LINK_FLAGS:STRING=-L%s/ -lgfortran' % (
+ libgfortran),
+ '-DTrilinos_ENABLE_Fortran=ON'
+ ])
# suite-sparse related
if '+suite-sparse' in spec:
options.extend([
- '-DTPL_ENABLE_Cholmod:BOOL=OFF', # FIXME: Trilinos seems to be looking for static libs only, patch CMake TPL file?
- #'-DTPL_ENABLE_Cholmod:BOOL=ON',
- #'-DCholmod_LIBRARY_DIRS:PATH=%s' % spec['suite-sparse'].prefix.lib,
- #'-DCholmod_INCLUDE_DIRS:PATH=%s' % spec['suite-sparse'].prefix.include,
+ # FIXME: Trilinos seems to be looking for static libs only,
+ # patch CMake TPL file?
+ '-DTPL_ENABLE_Cholmod:BOOL=OFF',
+ # '-DTPL_ENABLE_Cholmod:BOOL=ON',
+ # '-DCholmod_LIBRARY_DIRS:PATH=%s' % (
+ # spec['suite-sparse'].prefix.lib),
+ # '-DCholmod_INCLUDE_DIRS:PATH=%s' % (
+ # spec['suite-sparse'].prefix.include),
'-DTPL_ENABLE_UMFPACK:BOOL=ON',
- '-DUMFPACK_LIBRARY_DIRS:PATH=%s' % spec['suite-sparse'].prefix.lib,
- '-DUMFPACK_INCLUDE_DIRS:PATH=%s' % spec['suite-sparse'].prefix.include,
- '-DUMFPACK_LIBRARY_NAMES=umfpack;amd;colamd;cholmod;suitesparseconfig'
+ '-DUMFPACK_LIBRARY_DIRS:PATH=%s' % (
+ spec['suite-sparse'].prefix.lib),
+ '-DUMFPACK_INCLUDE_DIRS:PATH=%s' % (
+ spec['suite-sparse'].prefix.include),
+ '-DUMFPACK_LIBRARY_NAMES=umfpack;amd;colamd;cholmod;' +
+ 'suitesparseconfig'
])
else:
options.extend([
@@ -169,9 +267,11 @@ class Trilinos(Package):
'-DMETIS_LIBRARY_NAMES=metis',
'-DTPL_METIS_INCLUDE_DIRS=%s' % spec['metis'].prefix.include,
'-DTPL_ENABLE_ParMETIS:BOOL=ON',
- '-DParMETIS_LIBRARY_DIRS=%s;%s' % (spec['parmetis'].prefix.lib,spec['metis'].prefix.lib),
+ '-DParMETIS_LIBRARY_DIRS=%s;%s' % (
+ spec['parmetis'].prefix.lib, spec['metis'].prefix.lib),
'-DParMETIS_LIBRARY_NAMES=parmetis;metis',
- '-DTPL_ParMETIS_INCLUDE_DIRS=%s' % spec['parmetis'].prefix.include
+ '-DTPL_ParMETIS_INCLUDE_DIRS=%s' % (
+ spec['parmetis'].prefix.include)
])
else:
options.extend([
@@ -184,11 +284,14 @@ class Trilinos(Package):
options.extend([
'-DTPL_ENABLE_MUMPS:BOOL=ON',
'-DMUMPS_LIBRARY_DIRS=%s' % spec['mumps'].prefix.lib,
- '-DMUMPS_LIBRARY_NAMES=dmumps;mumps_common;pord', # order is important!
+ # order is important!
+ '-DMUMPS_LIBRARY_NAMES=dmumps;mumps_common;pord',
'-DTPL_ENABLE_SCALAPACK:BOOL=ON',
- '-DSCALAPACK_LIBRARY_NAMES=scalapack' # FIXME: for MKL it's mkl_scalapack_lp64;mkl_blacs_mpich_lp64
+ # FIXME: for MKL it's mkl_scalapack_lp64;mkl_blacs_mpich_lp64
+ '-DSCALAPACK_LIBRARY_NAMES=scalapack'
])
- # see https://github.com/trilinos/Trilinos/blob/master/packages/amesos/README-MUMPS
+ # see
+ # https://github.com/trilinos/Trilinos/blob/master/packages/amesos/README-MUMPS
cxx_flags.extend([
'-DMUMPS_5_0'
])
@@ -201,16 +304,20 @@ class Trilinos(Package):
# superlu-dist:
if '+superlu-dist' in spec:
# Amesos, conflicting types of double and complex SLU_D
- # see https://trilinos.org/pipermail/trilinos-users/2015-March/004731.html
- # and https://trilinos.org/pipermail/trilinos-users/2015-March/004802.html
+ # see
+ # https://trilinos.org/pipermail/trilinos-users/2015-March/004731.html
+ # and
+ # https://trilinos.org/pipermail/trilinos-users/2015-March/004802.html
options.extend([
'-DTeuchos_ENABLE_COMPLEX:BOOL=OFF',
'-DKokkosTSQR_ENABLE_Complex:BOOL=OFF'
])
options.extend([
'-DTPL_ENABLE_SuperLUDist:BOOL=ON',
- '-DSuperLUDist_LIBRARY_DIRS=%s' % spec['superlu-dist'].prefix.lib,
- '-DSuperLUDist_INCLUDE_DIRS=%s' % spec['superlu-dist'].prefix.include
+ '-DSuperLUDist_LIBRARY_DIRS=%s' %
+ spec['superlu-dist'].prefix.lib,
+ '-DSuperLUDist_INCLUDE_DIRS=%s' %
+ spec['superlu-dist'].prefix.include
])
if spec.satisfies('^superlu-dist@4.0:'):
options.extend([
@@ -221,6 +328,19 @@ class Trilinos(Package):
'-DTPL_ENABLE_SuperLUDist:BOOL=OFF',
])
+ # superlu:
+ if '+superlu' in spec:
+ options.extend([
+ '-DTPL_ENABLE_SuperLU:BOOL=ON',
+ '-DSuperLU_LIBRARY_DIRS=%s' %
+ spec['superlu'].prefix.lib,
+ '-DSuperLU_INCLUDE_DIRS=%s' %
+ spec['superlu'].prefix.include
+ ])
+ else:
+ options.extend([
+ '-DTPL_ENABLE_SuperLU:BOOL=OFF',
+ ])
# python
if '+python' in spec:
@@ -247,24 +367,20 @@ class Trilinos(Package):
options.extend([
'-DTrilinos_ENABLE_FEI=OFF'
])
+ return options
-
- with working_dir('spack-build', create=True):
- cmake('..', *options)
- make()
- make('install')
-
- # When trilinos is built with Python, libpytrilinos is included through
- # cmake configure files. Namely, Trilinos_LIBRARIES in TrilinosConfig.cmake
- # contains pytrilinos. This leads to a run-time error:
- # Symbol not found: _PyBool_Type
- # and prevents Trilinos to be used in any C++ code, which links executable
- # against the libraries listed in Trilinos_LIBRARIES.
- # See https://github.com/Homebrew/homebrew-science/issues/2148#issuecomment-103614509
- # A workaround it to remove PyTrilinos from the COMPONENTS_LIST :
- if '+python' in self.spec:
- filter_file(r'(SET\(COMPONENTS_LIST.*)(PyTrilinos;)(.*)', (r'\1\3'), '%s/cmake/Trilinos/TrilinosConfig.cmake' % prefix.lib)
-
- # The shared libraries are not installed correctly on Darwin; correct this
- if (sys.platform == 'darwin') and ('+shared' in spec):
- fix_darwin_install_name(prefix.lib)
+ @CMakePackage.sanity_check('install')
+ def filter_python(self):
+ # When trilinos is built with Python, libpytrilinos is included
+ # through cmake configure files. Namely, Trilinos_LIBRARIES in
+ # TrilinosConfig.cmake contains pytrilinos. This leads to a
+ # run-time error: Symbol not found: _PyBool_Type and prevents
+ # Trilinos from being used in any C++ code that links an executable
+ # against the libraries listed in Trilinos_LIBRARIES. See
+ # https://github.com/Homebrew/homebrew-science/issues/2148#issuecomment-103614509
+ # A workaround is to remove PyTrilinos from the COMPONENTS_LIST:
+ if '+python' in self.spec:
+ filter_file(r'(SET\(COMPONENTS_LIST.*)(PyTrilinos;)(.*)',
+ (r'\1\3'),
+ '%s/cmake/Trilinos/TrilinosConfig.cmake' %
+ self.prefix.lib)
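One detail worth noting in cmake_args above: the *_LIBRARY_NAMES and *_LIBRARY_DIRS options must be semicolon-separated CMake lists, which is why the names and directories coming from spec['blas'].blas_libs and spec['lapack'].lapack_libs are joined with ';'. A self-contained sketch with made-up library names and paths:

    blas_names = ['openblas']           # stand-in for spec['blas'].blas_libs.names
    blas_dirs = ['/opt/openblas/lib']   # stand-in for the matching .directories
    options = [
        '-DBLAS_LIBRARY_NAMES=%s' % ';'.join(blas_names),
        '-DBLAS_LIBRARY_DIRS=%s' % ';'.join(blas_dirs),
    ]
    print(options)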
diff --git a/var/spack/repos/builtin/packages/turbomole/package.py b/var/spack/repos/builtin/packages/turbomole/package.py
index 6ccce23f97..cf14259da4 100644
--- a/var/spack/repos/builtin/packages/turbomole/package.py
+++ b/var/spack/repos/builtin/packages/turbomole/package.py
@@ -29,21 +29,21 @@ import subprocess
class Turbomole(Package):
"""TURBOMOLE: Program Package for ab initio Electronic Structure
- Calculations. NB: Requires a license to download."""
+ Calculations.
- # NOTE: Turbomole requires purchase of a license to download. Go to the
- # NOTE: Turbomole home page, http://www.turbomole-gmbh.com, for details.
- # NOTE: Spack will search the current directory for this file. It is
- # NOTE: probably best to add this file to a Spack mirror so that it can be
- # NOTE: found from anywhere. For information on setting up a Spack mirror
- # NOTE: see http://software.llnl.gov/spack/mirrors.html
+ Note: Turbomole requires purchase of a license to download. Go to the
+ Turbomole home page, http://www.turbomole-gmbh.com, for details.
+ Spack will search the current directory for the downloaded archive. It
+ is probably best to add the archive to a Spack mirror so that it can
+ be found from anywhere. For information on setting up a Spack mirror
+ see http://spack.readthedocs.io/en/latest/mirrors.html"""
homepage = "http://www.turbomole-gmbh.com/"
version('7.0.2', '92b97e1e52e8dcf02a4d9ac0147c09d6',
url="file://%s/turbolinux702.tar.gz" % os.getcwd())
- variant('mpi', default=False, description='Set up MPI environment')
+ variant('mpi', default=True, description='Set up MPI environment')
variant('smp', default=False, description='Set up SMP environment')
# Turbomole's install is odd. There are three variants
diff --git a/var/spack/repos/builtin/packages/twm/package.py b/var/spack/repos/builtin/packages/twm/package.py
new file mode 100644
index 0000000000..3e37f4903d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/twm/package.py
@@ -0,0 +1,56 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Twm(Package):
+ """twm is a window manager for the X Window System. It provides
+ titlebars, shaped windows, several forms of icon management,
+ user-defined macro functions, click-to-type and pointer-driven
+ keyboard focus, and user-specified key and pointer button bindings."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/twm"
+ url = "https://www.x.org/archive/individual/app/twm-1.0.9.tar.gz"
+
+ version('1.0.9', 'e98fcb32f774ac1ff7bf82101b79f61e')
+
+ depends_on('libx11')
+ depends_on('libxext')
+ depends_on('libxt')
+ depends_on('libxmu')
+ depends_on('libice')
+ depends_on('libsm')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('bison', type='build')
+ depends_on('flex', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/uberftp/package.py b/var/spack/repos/builtin/packages/uberftp/package.py
new file mode 100644
index 0000000000..b0c6c8a42f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/uberftp/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Uberftp(Package):
+ """UberFTP is an interactive (text-based) client for GridFTP"""
+
+ homepage = "http://toolkit.globus.org/grid_software/data/uberftp.php"
+ url = "https://github.com/JasonAlt/UberFTP/archive/Version_2_8.tar.gz"
+
+ version('2_8', 'bc7a159955a9c4b9f5f42f3d2b8fc830')
+ version('2_7', 'faaea2d6e1958c1105cfc9147824e03c')
+ version('2_6', '784210976f259f9d19c0798c19778d34')
+
+ depends_on('globus-toolkit')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/udunits2/package.py b/var/spack/repos/builtin/packages/udunits2/package.py
index aed39668fd..cfc8e30c41 100644
--- a/var/spack/repos/builtin/packages/udunits2/package.py
+++ b/var/spack/repos/builtin/packages/udunits2/package.py
@@ -24,17 +24,16 @@
##############################################################################
from spack import *
-class Udunits2(Package):
+
+class Udunits2(AutotoolsPackage):
"""Automated units conversion"""
homepage = "http://www.unidata.ucar.edu/software/udunits"
- url = "ftp://ftp.unidata.ucar.edu/pub/udunits/udunits-2.2.20.tar.gz"
+ url = "ftp://ftp.unidata.ucar.edu/pub/udunits/udunits-2.2.21.tar.gz"
- version('2.2.20', '1586b70a49dfe05da5fcc29ef239dce0')
+ version('2.2.21', '1f6d3375efc1f124790a4efb7102cdb7')
depends_on('expat')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
+ depends_on('bison', type='build')
+ depends_on('flex', type='build')
diff --git a/var/spack/repos/builtin/packages/uncrustify/package.py b/var/spack/repos/builtin/packages/uncrustify/package.py
index db96bc301e..c3182d0dc8 100644
--- a/var/spack/repos/builtin/packages/uncrustify/package.py
+++ b/var/spack/repos/builtin/packages/uncrustify/package.py
@@ -24,8 +24,9 @@
##############################################################################
from spack import *
+
class Uncrustify(Package):
- """Source Code Beautifier for C, C++, C#, ObjectiveC, D, Java, Pawn and VALA"""
+ """Source Code Beautifier for C, C++, C#, ObjectiveC, Java, and others."""
homepage = "http://uncrustify.sourceforge.net/"
url = "http://downloads.sourceforge.net/project/uncrustify/uncrustify/uncrustify-0.61/uncrustify-0.61.tar.gz"
diff --git a/var/spack/repos/builtin/packages/unibilium/package.py b/var/spack/repos/builtin/packages/unibilium/package.py
index d9e0ad6bcb..943e4737e1 100644
--- a/var/spack/repos/builtin/packages/unibilium/package.py
+++ b/var/spack/repos/builtin/packages/unibilium/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Unibilium(Package):
"""A terminfo parsing library"""
homepage = "https://github.com/mauke/unibilium"
@@ -32,5 +33,5 @@ class Unibilium(Package):
version('1.2.0', '9b1c97839a880a373da6c097443b43c4')
def install(self, spec, prefix):
- make("PREFIX="+prefix)
- make("install", "PREFIX="+prefix)
+ make("PREFIX=" + prefix)
+ make("install", "PREFIX=" + prefix)
diff --git a/var/spack/repos/builtin/packages/unison/package.py b/var/spack/repos/builtin/packages/unison/package.py
new file mode 100644
index 0000000000..181e1e6410
--- /dev/null
+++ b/var/spack/repos/builtin/packages/unison/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Unison(Package):
+ """Unison is a file-synchronization tool for OSX, Unix, and
+ Windows. It allows two replicas of a collection of files and
+ directories to be stored on different hosts (or different disks
+ on the same host), modified separately, and then brought up to
+ date by propagating the changes in each replica to the
+ other."""
+
+ homepage = "https://www.cis.upenn.edu/~bcpierce/unison/"
+ url = "https://www.seas.upenn.edu/~bcpierce/unison//download/releases/stable/unison-2.48.3.tar.gz"
+
+ version('2.48.4', '5334b78c7e68169df7de95f4c6c4b60f')
+
+ depends_on('ocaml', type='build')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ make('./mkProjectInfo')
+ make('UISTYLE=text')
+
+ mkdirp(prefix.bin)
+ install('unison', prefix.bin)
+ set_executable(join_path(prefix.bin, 'unison'))
diff --git a/var/spack/repos/builtin/packages/unixodbc/package.py b/var/spack/repos/builtin/packages/unixodbc/package.py
new file mode 100644
index 0000000000..15de127b7e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/unixodbc/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Unixodbc(Package):
+ """ODBC is an open specification for providing application developers with
+ a predictable API with which to access Data Sources. Data Sources include
+ SQL Servers and any Data Source with an ODBC Driver."""
+
+ homepage = "http://www.unixodbc.org/"
+ url = "ftp://ftp.unixodbc.org/pub/unixODBC/unixODBC-2.3.4.tar.gz"
+
+ version('2.3.4', 'bd25d261ca1808c947cb687e2034be81')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/util-linux/package.py b/var/spack/repos/builtin/packages/util-linux/package.py
index bf6972683d..99af170ca1 100644
--- a/var/spack/repos/builtin/packages/util-linux/package.py
+++ b/var/spack/repos/builtin/packages/util-linux/package.py
@@ -23,7 +23,7 @@
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
-import os
+
class UtilLinux(Package):
"""Util-linux is a suite of essential utilities for any Linux system."""
@@ -36,9 +36,9 @@ class UtilLinux(Package):
depends_on("python@2.7:")
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix,
- "PKG_CONFIG_PATH=%s/pkgconfig" % spec['python'].prefix.lib,
- "--disable-use-tty-group")
+ configure("--prefix=%s" % prefix,
+ "PKG_CONFIG_PATH=%s/pkgconfig" % spec['python'].prefix.lib,
+ "--disable-use-tty-group")
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/util-macros/package.py b/var/spack/repos/builtin/packages/util-macros/package.py
new file mode 100644
index 0000000000..486d4463b0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/util-macros/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class UtilMacros(Package):
+ """This is a set of autoconf macros used by the configure.ac scripts in
+ other Xorg modular packages, and is needed to generate new versions
+ of their configure scripts with autoconf."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/util/macros/"
+ url = "https://www.x.org/archive/individual/util/util-macros-1.19.0.tar.bz2"
+
+ version('1.19.0', '1cf984125e75f8204938d998a8b6c1e1')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/uuid/package.py b/var/spack/repos/builtin/packages/uuid/package.py
new file mode 100644
index 0000000000..5bcf59d9bf
--- /dev/null
+++ b/var/spack/repos/builtin/packages/uuid/package.py
@@ -0,0 +1,37 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Uuid(AutotoolsPackage):
+ """OSSP uuid is a ISO-C:1999 application programming interface (API) and
+ corresponding command line interface (CLI) for the generation of DCE 1.1,
+ ISO/IEC 11578:1996 and RFC 4122 compliant Universally Unique Identifier
+ (UUID)."""
+
+ homepage = "http://www.ossp.org/pkg/lib/uuid"
+ url = "http://www.mirrorservice.org/sites/ftp.ossp.org/pkg/lib/uuid/uuid-1.6.2.tar.gz"
+
+ version('1.6.2', '5db0d43a9022a6ebbbc25337ae28942f')
diff --git a/var/spack/repos/builtin/packages/valgrind/package.py b/var/spack/repos/builtin/packages/valgrind/package.py
index afd4cc6ad0..e7ae227c27 100644
--- a/var/spack/repos/builtin/packages/valgrind/package.py
+++ b/var/spack/repos/builtin/packages/valgrind/package.py
@@ -27,12 +27,15 @@ from spack import *
class Valgrind(Package):
- """
- Valgrind is an instrumentation framework for building dynamic analysis tools. There are Valgrind tools that can
- automatically detect many memory management and threading bugs, and profile your programs in detail. You can also
- use Valgrind to build new tools.
+ """An instrumentation framework for building dynamic analysis.
+
+ There are Valgrind tools that can automatically detect many memory
+ management and threading bugs, and profile your programs in
+ detail. You can also use Valgrind to build new tools.
+
+ Valgrind is Open Source / Free Software, and is freely available
+ under the GNU General Public License, version 2.
- Valgrind is Open Source / Free Software, and is freely available under the GNU General Public License, version 2.
"""
homepage = "http://valgrind.org/"
url = "http://valgrind.org/downloads/valgrind-3.11.0.tar.bz2"
@@ -42,7 +45,8 @@ class Valgrind(Package):
version('3.10.0', '7c311a72a20388aceced1aa5573ce970')
variant('mpi', default=True, description='Activates MPI support for valgrind')
- variant('boost', default=True, description='Activates boost support for valgrind')
+ variant('boost', default=True,
+ description='Activates boost support for valgrind')
depends_on('mpi', when='+mpi')
depends_on('boost', when='+boost')
diff --git a/var/spack/repos/builtin/packages/veclibfort/package.py b/var/spack/repos/builtin/packages/veclibfort/package.py
new file mode 100644
index 0000000000..b906d4f9c9
--- /dev/null
+++ b/var/spack/repos/builtin/packages/veclibfort/package.py
@@ -0,0 +1,70 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import sys
+
+
+class Veclibfort(Package):
+ """Lightweight but flexible shim designed to rectify the incompatibilities
+ between the Accelerate/vecLib BLAS and LAPACK libraries shipped with macOS
+ and FORTRAN code compiled with modern compilers such as GNU Fortran."""
+
+ homepage = "https://github.com/mcg1969/vecLibFort"
+ url = "https://github.com/mcg1969/vecLibFort/archive/0.4.2.tar.gz"
+
+ version('0.4.2', '83395ffcbe8a2122c3f726a5c3a7cf93')
+ version('develop', git='https://github.com/mcg1969/vecLibFort.git')
+
+ variant('shared', default=True,
+ description="Build shared libraries as well as static libs.")
+
+ # virtual dependency
+ provides('blas')
+ provides('lapack')
+
+ @property
+ def blas_libs(self):
+ shared = '+shared' in self.spec
+ return find_libraries(
+ ['libvecLibFort'], root=self.prefix, shared=shared, recurse=True
+ )
+
+ @property
+ def lapack_libs(self):
+ return self.blas_libs
+
+ def install(self, spec, prefix):
+ if sys.platform != 'darwin':
+ raise InstallError('vecLibFort can be installed on macOS only')
+
+ make('all')
+ make('PREFIX=%s' % prefix, 'install')
+
+ # test
+ fc = which('fc')
+ flags = ['-o', 'tester', '-O', 'tester.f90']
+ flags.extend(self.lapack_libs.ld_flags.split())
+ fc(*flags)
+ Executable('./tester')()
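
Because vecLibFort declares provides('blas') and provides('lapack'), downstream recipes are not expected to depend on it by name; they depend on the virtual packages and query whichever provider was concretized for its libraries. The following is only a minimal sketch of that consumer side: the HypotheticalSolver package, its omitted metadata, and its Makefile variables are invented for illustration and are not part of this commit.

    from spack import *


    class HypotheticalSolver(Package):
        """Illustrative consumer of the virtual blas/lapack interface."""

        # homepage/url/version omitted for brevity in this sketch

        depends_on('blas')
        depends_on('lapack')

        def install(self, spec, prefix):
            # Whatever provider was concretized (vecLibFort, OpenBLAS, ...)
            # exposes its libraries through these LibraryList properties.
            lapack = spec['lapack'].lapack_libs
            blas = spec['blas'].blas_libs
            make('LDFLAGS={0}'.format((lapack + blas).ld_flags))
            make('PREFIX={0}'.format(prefix), 'install')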
diff --git a/var/spack/repos/builtin/packages/videoproto/package.py b/var/spack/repos/builtin/packages/videoproto/package.py
new file mode 100644
index 0000000000..93b0e61ca4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/videoproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Videoproto(Package):
+ """X Video Extension.
+
+ This extension provides a protocol for a video output mechanism,
+ mainly to rescale video playback in the video controller hardware."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/videoproto"
+ url = "https://www.x.org/archive/individual/proto/videoproto-2.3.3.tar.gz"
+
+ version('2.3.3', 'd984100603ee2420072f27bb491f4b7d')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/viewres/package.py b/var/spack/repos/builtin/packages/viewres/package.py
new file mode 100644
index 0000000000..3a32555075
--- /dev/null
+++ b/var/spack/repos/builtin/packages/viewres/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Viewres(Package):
+ """viewres displays a tree showing the widget class hierarchy of the
+ Athena Widget Set (libXaw)."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/viewres"
+ url = "https://www.x.org/archive/individual/app/viewres-1.0.4.tar.gz"
+
+ version('1.0.4', 'a3c7fe561945951f848e319680753760')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/vim/package.py b/var/spack/repos/builtin/packages/vim/package.py
index f66c20894b..e2542c3f56 100644
--- a/var/spack/repos/builtin/packages/vim/package.py
+++ b/var/spack/repos/builtin/packages/vim/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Vim(Package):
"""Vim is a highly configurable text editor built to enable efficient text
editing. It is an improved version of the vi editor distributed with most
@@ -34,24 +35,14 @@ class Vim(Package):
"""
homepage = "http://www.vim.org"
- url = "ftp://ftp.vim.org/pub/vim/unix/vim-7.4.tar.bz2"
- list_url = "http://ftp.vim.org/pub/vim/unix/"
-
- version('7.4', '607e135c559be642f210094ad023dc65')
- version('7.3', '5b9510a17074e2b37d8bb38ae09edbf2')
- version('7.2', 'f0901284b338e448bfd79ccca0041254')
- version('7.1', '44c6b4914f38d6f9aa959640b89da329')
- version('7.0', '4ca69757678272f718b1041c810d82d8')
- version('6.4', '774c14d93ce58674b3b2c880edd12d77')
- version('6.3', '821fda8f14d674346b87e3ef9cb96389')
- version('6.2', 'c49d360bbd069d00e2a57804f2a123d9')
- version('6.1.405', 'd220ff58f2c72ed606e6d0297c2f2a7c')
- version('6.1', '7fd0f915adc7c0dab89772884268b030')
- version('6.0', '9d9ca84d489af6b3f54639dd97af3774')
+ url = "https://github.com/vim/vim/archive/v8.0.0134.tar.gz"
+
+ version('8.0.0134', 'c74668d25c2acc85d655430dd60886cd')
+ version('7.4.2367', 'a0a7bc394f7ab1d95571fe6ab05da3ea')
feature_sets = ('huge', 'big', 'normal', 'small', 'tiny')
for fs in feature_sets:
- variant(fs, default=False, description="Use '%s' feature set" % fs)
+ variant(fs, default=False, description="Use '%s' feature set" % fs)
variant('python', default=False, description="build with Python")
depends_on('python', when='+python')
@@ -60,48 +51,52 @@ class Vim(Package):
depends_on('ruby', when='+ruby')
variant('cscope', default=False, description="build with cscope support")
- depends_on('cscope', when='+cscope')
+ depends_on('cscope', when='+cscope', type='run')
variant('gui', default=False, description="build with gui (gvim)")
# virtual dependency?
+ depends_on('ncurses', when="@7.4:")
+
def install(self, spec, prefix):
- feature_set = None
- for fs in self.feature_sets:
- if "+" + fs in spec:
- if feature_set is not None:
- tty.error("Only one feature set allowed, both %s and %s specified"
- % (feature_set, fs))
- feature_set = fs
- if '+gui' in spec:
- if feature_set is not None:
- if feature_set is not 'huge':
- tty.error("+gui variant requires 'huge' feature set, %s was specified"
- % feature_set)
- feature_set = 'huge'
- if feature_set is None:
- feature_set = 'normal'
-
- configure_args = []
- configure_args.append("--with-features=" + feature_set)
-
- if '+python' in spec:
- configure_args.append("--enable-pythoninterp=yes")
- else:
- configure_args.append("--enable-pythoninterp=dynamic")
-
- if '+ruby' in spec:
- configure_args.append("--enable-rubyinterp=yes")
- else:
- configure_args.append("--enable-rubyinterp=dynamic")
-
- if '+gui' in spec:
- configure_args.append("--enable-gui=auto")
-
- if '+cscope' in spec:
- configure_args.append("--enable-cscope")
-
- configure("--prefix=%s" % prefix, *configure_args)
-
- make()
- make("install")
+ feature_set = None
+ for fs in self.feature_sets:
+ if "+" + fs in spec:
+ if feature_set is not None:
+ raise InstallError(
+ "Only one feature set allowed, specified %s and %s"
+ % (feature_set, fs))
+ feature_set = fs
+ if '+gui' in spec:
+ if feature_set is not None:
+ if feature_set != 'huge':
+ raise InstallError(
+ "+gui variant requires 'huge' feature set, "
+ "%s was specified" % feature_set)
+ feature_set = 'huge'
+ if feature_set is None:
+ feature_set = 'normal'
+
+ configure_args = []
+ configure_args.append("--with-features=" + feature_set)
+
+ if '+python' in spec:
+ configure_args.append("--enable-pythoninterp=yes")
+ else:
+ configure_args.append("--enable-pythoninterp=dynamic")
+
+ if '+ruby' in spec:
+ configure_args.append("--enable-rubyinterp=yes")
+ else:
+ configure_args.append("--enable-rubyinterp=dynamic")
+
+ if '+gui' in spec:
+ configure_args.append("--enable-gui=auto")
+
+ if '+cscope' in spec:
+ configure_args.append("--enable-cscope")
+
+ configure("--prefix=%s" % prefix, *configure_args)
+
+ make()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/visit/package.py b/var/spack/repos/builtin/packages/visit/package.py
index ae19fd0450..48a3762a2a 100644
--- a/var/spack/repos/builtin/packages/visit/package.py
+++ b/var/spack/repos/builtin/packages/visit/package.py
@@ -26,30 +26,38 @@ from spack import *
class Visit(Package):
- """VisIt is an Open Source, interactive, scalable, visualization, animation and analysis tool."""
+ """VisIt is an Open Source, interactive, scalable, visualization,
+ animation and analysis tool."""
homepage = "https://wci.llnl.gov/simulation/computer-codes/visit/"
url = "http://portal.nersc.gov/project/visit/releases/2.10.1/visit2.10.1.tar.gz"
- version('2.10.1', '3cbca162fdb0249f17c4456605c4211e')
+ version('2.10.3', 'a1082a6f6dab3e2dcb58993603456c2b')
version('2.10.2', '253de0837a9d69fb689befc98ea4d068')
+ version('2.10.1', '3cbca162fdb0249f17c4456605c4211e')
- depends_on("vtk@6.1.0~opengl2")
- depends_on("qt@4.8.6")
- depends_on("python")
- depends_on("silo+shared")
+ depends_on('cmake', type='build')
+ depends_on('vtk@6.1.0~opengl2')
+ depends_on('qt@4.8.6')
+ depends_on('python')
+ depends_on('silo+shared')
+ depends_on('hdf5~mpi')
def install(self, spec, prefix):
- with working_dir('spack-build', create=True):
-
- feature_args = std_cmake_args[:]
- feature_args.extend(["-DVTK_MAJOR_VERSION=6",
- "-DVTK_MINOR_VERSION=1",
- "-DVISIT_LOC_QMAKE_EXE:FILEPATH=%s/qmake-qt4" % spec['qt'].prefix.bin,
- "-DPYTHON_EXECUTABLE:FILEPATH=%s/python" % spec['python'].prefix.bin,
- "-DVISIT_SILO_DIR:PATH=%s" % spec['silo'].prefix,
- "-DVISIT_HDF5_DIR:PATH=%s" % spec['hdf5'].prefix])
-
- cmake('../src', *feature_args)
+ qt_bin = spec['qt'].prefix.bin
+ with working_dir('spack-build', create=True):
+ cmake_args = std_cmake_args[:]
+ cmake_args.extend([
+ '-DVTK_MAJOR_VERSION=6',
+ '-DVTK_MINOR_VERSION=1',
+ '-DVISIT_USE_GLEW=OFF',
+ '-DVISIT_LOC_QMAKE_EXE:FILEPATH={0}/qmake-qt4'.format(qt_bin),
+ '-DPYTHON_DIR:PATH={0}'.format(spec['python'].prefix),
+ '-DVISIT_SILO_DIR:PATH={0}'.format(spec['silo'].prefix),
+ '-DVISIT_HDF5_DIR:PATH={0}'.format(spec['hdf5'].prefix),
+ '-DVISIT_VTK_DIR:PATH={0}'.format(spec['vtk'].prefix),
+ ])
+
+ cmake(join_path('..', 'src'), *cmake_args)
make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/vizglow/package.py b/var/spack/repos/builtin/packages/vizglow/package.py
new file mode 100644
index 0000000000..9e4506b05a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/vizglow/package.py
@@ -0,0 +1,58 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import glob
+import os
+
+
+class Vizglow(Package):
+ """VizGlow software tool is used for high-fidelity multi-dimensional
+ modeling of non-equilibrium plasma discharges.
+
+ Note: VizGlow is licensed software. You will need to create an account on
+ the EsgeeTech homepage and download VizGlow yourself. Spack will search
+ your current directory for a file of this format. Alternatively, add this
+ file to a mirror so that Spack can find it. For instructions on how to
+ set up a mirror, see http://spack.readthedocs.io/en/latest/mirrors.html"""
+
+ homepage = "http://esgeetech.com/products/vizglow-plasma-modeling/"
+
+ version('2.2a-15', 'be2b5044f30f2b2c3bbe87a0037bf228', expand=False,
+ url="file://{0}/VizGlow_v2.2alpha15-Linux-x86_64-R31October2016-Install".format(os.getcwd()))
+
+ # Licensing
+ license_required = True
+ license_comment = '#'
+ license_files = ['esgeelm.lic']
+ license_vars = ['ESGEE_LICENSE_FILE']
+
+ def install(self, spec, prefix):
+ installer = glob.glob('VizGlow*Install')[0]
+
+ chmod = which('chmod')
+ chmod('+x', installer)
+
+ installer = Executable(installer)
+ installer('--mode', 'silent', '--prefix', prefix)
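
The license_required / license_comment / license_files / license_vars attributes used above are Spack's generic hooks for license-restricted software that the user fetches by hand. As a rough sketch of the same pattern applied to a hypothetical vendor package (the package name, installer filename, and checksum below are placeholders, not real values):

    import os

    from spack import *


    class HypotheticalVendorTool(Package):
        """Licensed software that must be downloaded manually by the user."""

        homepage = "https://vendor.example.com/tool"

        # With a file:// URL Spack looks for the named installer in the
        # current working directory (or in a configured mirror) instead of
        # downloading it itself; expand=False keeps the blob unextracted.
        version('1.0', '00000000000000000000000000000000', expand=False,
                url="file://{0}/VendorTool-1.0-Linux-x86_64.run".format(
                    os.getcwd()))

        # Ask Spack to set up a license file and point the vendor's
        # environment variable at it.
        license_required = True
        license_comment = '#'
        license_files = ['vendor.lic']
        license_vars = ['VENDOR_LICENSE_FILE']

        def install(self, spec, prefix):
            set_executable('VendorTool-1.0-Linux-x86_64.run')
            installer = Executable('./VendorTool-1.0-Linux-x86_64.run')
            installer('--prefix', prefix)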
diff --git a/var/spack/repos/builtin/packages/vtk/gcc.patch b/var/spack/repos/builtin/packages/vtk/gcc.patch
new file mode 100644
index 0000000000..37ba5ac45a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/vtk/gcc.patch
@@ -0,0 +1,21 @@
+--- old/CMake/vtkCompilerExtras.cmake
++++ new/CMake/vtkCompilerExtras.cmake
+@@ -26,6 +26,8 @@
+ execute_process(COMMAND ${CMAKE_C_COMPILER} --version
+ OUTPUT_VARIABLE _gcc_version_info
+ ERROR_VARIABLE _gcc_version_info)
++
++ string (REPLACE ";" "" _gcc_version_info "${_gcc_version_info}")
+
+ string (REGEX MATCH "[345]\\.[0-9]\\.[0-9]"
+ _gcc_version "${_gcc_version_info}")
+--- old/CMake/GenerateExportHeader.cmake
++++ new/CMake/GenerateExportHeader.cmake
+@@ -166,6 +166,7 @@
+ execute_process(COMMAND ${CMAKE_C_COMPILER} --version
+ OUTPUT_VARIABLE _gcc_version_info
+ ERROR_VARIABLE _gcc_version_info)
++ string (REPLACE ";" "" _gcc_version_info "${_gcc_version_info}")
+ string(REGEX MATCH "[345]\\.[0-9]\\.[0-9]"
+ _gcc_version "${_gcc_version_info}")
+ # gcc on mac just reports: "gcc (GCC) 3.3 20030304 ..." without the
diff --git a/var/spack/repos/builtin/packages/vtk/package.py b/var/spack/repos/builtin/packages/vtk/package.py
index d2296dbc26..c2d5ff399f 100644
--- a/var/spack/repos/builtin/packages/vtk/package.py
+++ b/var/spack/repos/builtin/packages/vtk/package.py
@@ -22,61 +22,78 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
+
from spack import *
+
class Vtk(Package):
"""The Visualization Toolkit (VTK) is an open-source, freely
available software system for 3D computer graphics, image
processing and visualization. """
+
homepage = "http://www.vtk.org"
- url = "http://www.vtk.org/files/release/6.1/VTK-6.1.0.tar.gz"
+ base_url = "http://www.vtk.org/files/release"
- version("7.0.0", "5fe35312db5fb2341139b8e4955c367d", url="http://www.vtk.org/files/release/7.0/VTK-7.0.0.tar.gz")
+ version('7.0.0', '5fe35312db5fb2341139b8e4955c367d')
+ version('6.3.0', '0231ca4840408e9dd60af48b314c5b6d')
+ version('6.1.0', '25e4dfb3bad778722dcaec80cd5dab7d')
- version("6.3.0", '0231ca4840408e9dd60af48b314c5b6d', url="http://www.vtk.org/files/release/6.3/VTK-6.3.0.tar.gz")
+ # VTK7 defaults to OpenGL2 rendering backend
+ variant('opengl2', default=True, description='Build with OpenGL2 instead of OpenGL as rendering backend')
+ variant('python', default=False, description='Build the python modules')
- version('6.1.0', '25e4dfb3bad778722dcaec80cd5dab7d')
+ patch('gcc.patch')
- depends_on("qt")
+ depends_on('cmake', type='build')
+ depends_on('qt')
- # VTK7 defaults to OpenGL2 rendering backend
- variant('opengl2', default=True, description='Build with OpenGL instead of OpenGL2 as rendering backend')
+ extends('python', when='+python')
+ depends_on('python', when='+python')
+
+ def url_for_version(self, ver):
+ return '{0}/{1}/VTK-{2}.tar.gz'.format(Vtk.base_url, ver.up_to(2), ver)
def install(self, spec, prefix):
def feature_to_bool(feature, on='ON', off='OFF'):
- if feature in spec:
- return on
- return off
+ return on if '+{0}'.format(feature) in spec else off
with working_dir('spack-build', create=True):
- cmake_args = [
- "..",
- "-DBUILD_SHARED_LIBS=ON",
- # Disable wrappers for other languages.
- "-DVTK_WRAP_PYTHON=OFF",
- "-DVTK_WRAP_JAVA=OFF",
- "-DVTK_WRAP_TCL=OFF"]
- cmake_args.extend(std_cmake_args)
+ opengl_ver = 'OpenGL{0}'.format('2' if '+opengl2' in spec else '')
+ qt_ver = spec['qt'].version.up_to(1)
+ qt_bin = spec['qt'].prefix.bin
- # Enable Qt support here.
+ cmake_args = std_cmake_args[:]
cmake_args.extend([
- "-DQT_QMAKE_EXECUTABLE:PATH=%s/qmake" % spec['qt'].prefix.bin,
- "-DVTK_Group_Qt:BOOL=ON",
- # Ignore webkit because it's hard to build w/Qt
- "-DVTK_Group_Qt=OFF",
- "-DModule_vtkGUISupportQt:BOOL=ON",
- "-DModule_vtkGUISupportQtOpenGL:BOOL=ON"
- ])
+ '-DBUILD_SHARED_LIBS=ON',
+ '-DVTK_RENDERING_BACKEND:STRING={0}'.format(opengl_ver),
- if spec['qt'].satisfies('@5'):
- cmake_args.append("-DVTK_QT_VERSION:STRING=5")
+ # Enable/Disable wrappers for Python.
+ '-DVTK_WRAP_PYTHON={0}'.format(feature_to_bool('python')),
- if spec.satisfies("@6.1.0"):
- cmake_args.append("-DCMAKE_C_FLAGS=-DGLX_GLXEXT_LEGACY")
- cmake_args.append("-DCMAKE_CXX_FLAGS=-DGLX_GLXEXT_LEGACY")
+ # Disable wrappers for other languages.
+ '-DVTK_WRAP_JAVA=OFF',
+ '-DVTK_WRAP_TCL=OFF',
+
+ # Enable Qt support here.
+ '-DVTK_QT_VERSION:STRING={0}'.format(qt_ver),
+ '-DQT_QMAKE_EXECUTABLE:PATH={0}/qmake'.format(qt_bin),
+ '-DVTK_Group_Qt:BOOL=ON',
+ ])
+
+ # NOTE: The following definitions are required in order to allow
+ # VTK to build with qt~webkit versions (see the documentation for
+ # more info: http://www.vtk.org/Wiki/VTK/Tutorials/QtSetup).
+ if '~webkit' in spec['qt']:
+ cmake_args.extend([
+ '-DVTK_Group_Qt:BOOL=OFF',
+ '-DModule_vtkGUISupportQt:BOOL=ON',
+ '-DModule_vtkGUISupportQtOpenGL:BOOL=ON',
+ ])
- cmake_args.append('-DVTK_RENDERING_BACKEND:STRING=%s' % feature_to_bool('+opengl2', 'OpenGL2', 'OpenGL'))
+ if spec.satisfies('@:6.1.0'):
+ cmake_args.append('-DCMAKE_C_FLAGS=-DGLX_GLXEXT_LEGACY')
+ cmake_args.append('-DCMAKE_CXX_FLAGS=-DGLX_GLXEXT_LEGACY')
- cmake(*cmake_args)
+ cmake('..', *cmake_args)
make()
- make("install")
+ make('install')
diff --git a/var/spack/repos/builtin/packages/wannier90/make.sys b/var/spack/repos/builtin/packages/wannier90/make.sys
new file mode 100644
index 0000000000..f96fa23fb2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/wannier90/make.sys
@@ -0,0 +1,7 @@
+F90 = @F90
+COMMS=mpi
+MPIF90=@MPIF90
+FCOPTS=-O2 -fpic
+LDOPTS=-O2 -fpic
+
+LIBS = @LIBS
diff --git a/var/spack/repos/builtin/packages/wannier90/package.py b/var/spack/repos/builtin/packages/wannier90/package.py
new file mode 100644
index 0000000000..119d2cf769
--- /dev/null
+++ b/var/spack/repos/builtin/packages/wannier90/package.py
@@ -0,0 +1,116 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+import inspect
+import os.path
+import shutil
+
+from spack import *
+
+
+class Wannier90(Package):
+ """Wannier90 calculates maximally-localised Wannier functions (MLWFs).
+
+ Wannier90 is released under the GNU General Public License.
+ """
+ homepage = 'http://wannier.org'
+ url = 'http://wannier.org/code/wannier90-2.0.1.tar.gz'
+
+ version('2.0.1', '4edd742506eaba93317249d33261fb22')
+
+ depends_on('mpi')
+ depends_on('lapack')
+ depends_on('blas')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+
+ lapack = self.spec['lapack'].lapack_libs
+ blas = self.spec['blas'].blas_libs
+ substitutions = {
+ '@F90': spack_fc,
+ '@MPIF90': self.spec['mpi'].mpifc,
+ '@LIBS': (lapack + blas).joined()
+ }
+ #######
+ # TODO : this part is replicated in PEXSI
+ # TODO : and may be a common pattern for Editable Makefiles
+ # TODO : see #1186
+ template = join_path(
+ os.path.dirname(inspect.getmodule(self).__file__),
+ 'make.sys'
+ )
+ makefile = join_path(
+ self.stage.source_path,
+ 'make.sys'
+ )
+
+ shutil.copy(template, makefile)
+ for key, value in substitutions.items():
+ filter_file(key, value, makefile)
+ ######
+
+ make('wannier')
+ mkdirp(self.prefix.bin)
+ install(
+ join_path(self.stage.source_path, 'wannier90.x'),
+ join_path(self.prefix.bin, 'wannier90.x')
+ )
+
+ make('post')
+ install(
+ join_path(self.stage.source_path, 'postw90.x'),
+ join_path(self.prefix.bin, 'postw90.x')
+ )
+
+ make('lib')
+ mkdirp(self.prefix.lib)
+ install(
+ join_path(self.stage.source_path, 'libwannier.a'),
+ join_path(self.prefix.lib, 'libwannier.a')
+ )
+
+ make('w90chk2chk')
+ install(
+ join_path(self.stage.source_path, 'w90chk2chk.x'),
+ join_path(self.prefix.bin, 'w90chk2chk.x')
+ )
+
+ make('w90vdw')
+ install(
+ join_path(self.stage.source_path, 'utility', 'w90vdw', 'w90vdw.x'),
+ join_path(self.prefix.bin, 'w90vdw.x')
+ )
+
+ make('w90pov')
+ install(
+ join_path(self.stage.source_path, 'utility', 'w90pov', 'w90pov'),
+ join_path(self.prefix.bin, 'w90pov')
+ )
+
+ install_tree(
+ join_path(self.stage.source_path, 'pseudo'),
+ join_path(self.prefix.bin, 'pseudo')
+ )
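
The TODO in the install method above flags the copy-a-template-and-substitute step as a pattern repeated in other packages (e.g. PEXSI). Purely as a sketch, a shared helper for it could look like the following; substitute_template is hypothetical and not part of Spack's API at this commit.

    import inspect
    import os.path
    import shutil

    from spack import *


    def substitute_template(pkg, name, substitutions):
        """Copy a template shipped alongside package.py into the stage and
        replace each @KEY placeholder with its value (hypothetical helper)."""
        template = os.path.join(
            os.path.dirname(inspect.getmodule(pkg).__file__), name)
        target = os.path.join(pkg.stage.source_path, name)
        shutil.copy(template, target)
        for key, value in substitutions.items():
            filter_file(key, value, target)

With such a helper, Wannier90's install method would shrink to a single substitute_template(self, 'make.sys', substitutions) call before running the make targets.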
diff --git a/var/spack/repos/builtin/packages/wget/package.py b/var/spack/repos/builtin/packages/wget/package.py
index 532cf332e9..aff771b723 100644
--- a/var/spack/repos/builtin/packages/wget/package.py
+++ b/var/spack/repos/builtin/packages/wget/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Wget(Package):
"""GNU Wget is a free software package for retrieving files using
HTTP, HTTPS and FTP, the most widely-used Internet protocols. It
@@ -40,9 +41,11 @@ class Wget(Package):
depends_on("openssl")
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix,
- "--with-ssl=openssl",
- "OPENSSL_CFLAGS=-I%s" % spec['openssl'].prefix.include,
- "OPENSSL_LIBS=-L%s -lssl -lcrypto -lz" % spec['openssl'].prefix.lib)
+ configure(
+ "--prefix=%s" % prefix,
+ "--with-ssl=openssl",
+ "OPENSSL_CFLAGS=-I%s" % spec['openssl'].prefix.include,
+ "OPENSSL_LIBS=-L%s -lssl -lcrypto -lz" %
+ spec['openssl'].prefix.lib)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/windowswmproto/package.py b/var/spack/repos/builtin/packages/windowswmproto/package.py
new file mode 100644
index 0000000000..f163d1afb0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/windowswmproto/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Windowswmproto(Package):
+ """This module provides the definition of the WindowsWM extension to the
+ X11 protocol, used for coordination between an X11 server and the
+ Microsoft Windows native window manager.
+
+ WindowsWM is only intended to be used on Cygwin when running a
+ rootless XWin server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/windowswmproto"
+ url = "https://www.x.org/archive/individual/proto/windowswmproto-1.0.4.tar.gz"
+
+ version('1.0.4', '558db92a8e4e1b07e9c62eca3f04dd8d')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/wx/package.py b/var/spack/repos/builtin/packages/wx/package.py
index c000824803..42d39df9e6 100644
--- a/var/spack/repos/builtin/packages/wx/package.py
+++ b/var/spack/repos/builtin/packages/wx/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Wx(Package):
"""wxWidgets is a C++ library that lets developers create
applications for Windows, Mac OS X, Linux and other platforms
@@ -34,17 +35,24 @@ class Wx(Package):
rather than emulating the GUI. It's also extensive, free,
open-source and mature."""
homepage = "http://www.wxwidgets.org/"
+ url = "https://github.com/wxWidgets/wxWidgets/releases/download/v3.1.0/wxWidgets-3.1.0.tar.bz2"
- version('2.8.12', '2fa39da14bc06ea86fe902579fedc5b1',
- url="https://sourceforge.net/projects/wxwindows/files/2.8.12/wxWidgets-2.8.12.tar.gz")
- version('3.0.1', 'dad1f1cd9d4c370cbc22700dc492da31',
- url="https://sourceforge.net/projects/wxwindows/files/3.0.1/wxWidgets-3.0.1.tar.bz2")
+ version('3.1.0', '2170839cfa9d9322e8ee8368b21a15a2497b4f11')
+ version('3.0.2', '6461eab4428c0a8b9e41781b8787510484dea800')
+ version('3.0.1', '73e58521d6871c9f4d1e7974c6e3a81629fddcf8')
depends_on('gtkplus')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix, "--enable-unicode", "--disable-precomp-headers")
+ def make_wx(self):
+ make()
+ @when('@:3.0.2')
+ def make_wx(self):
make(parallel=False)
- make("install")
+ def install(self, spec, prefix):
+ configure("--prefix=%s" % prefix, "--enable-unicode",
+ "--disable-precomp-headers")
+
+ self.make_wx()
+ make("install")
diff --git a/var/spack/repos/builtin/packages/wxpropgrid/package.py b/var/spack/repos/builtin/packages/wxpropgrid/package.py
index 2283e1acf1..cc9ff445d6 100644
--- a/var/spack/repos/builtin/packages/wxpropgrid/package.py
+++ b/var/spack/repos/builtin/packages/wxpropgrid/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Wxpropgrid(Package):
"""wxPropertyGrid is a property sheet control for wxWidgets. In
other words, it is a specialized two-column grid for editing
@@ -37,8 +38,8 @@ class Wxpropgrid(Package):
depends_on("wx")
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix, "--with-wxdir=%s" % spec['wx'].prefix.bin, "--enable-unicode")
+ configure("--prefix=%s" % prefix, "--with-wxdir=%s" %
+ spec['wx'].prefix.bin, "--enable-unicode")
make()
make("install")
-
diff --git a/var/spack/repos/builtin/packages/x11perf/package.py b/var/spack/repos/builtin/packages/x11perf/package.py
new file mode 100644
index 0000000000..91db1e8a59
--- /dev/null
+++ b/var/spack/repos/builtin/packages/x11perf/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class X11perf(Package):
+ """Simple X server performance benchmarker."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/x11perf"
+ url = "https://www.x.org/archive/individual/app/x11perf-1.6.0.tar.gz"
+
+ version('1.6.0', '8dcdb74db8c70dca4b4eab11dc33dd31')
+
+ depends_on('libx11')
+ depends_on('libxmu')
+ depends_on('libxrender')
+ depends_on('libxft')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xauth/package.py b/var/spack/repos/builtin/packages/xauth/package.py
new file mode 100644
index 0000000000..6d6a03c899
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xauth/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xauth(Package):
+ """The xauth program is used to edit and display the authorization
+ information used in connecting to the X server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xauth"
+ url = "https://www.x.org/archive/individual/app/xauth-1.0.9.tar.gz"
+
+ version('1.0.9', 'def3b4588504ee3d8ec7be607826df02')
+
+ depends_on('libx11')
+ depends_on('libxau')
+ depends_on('libxext')
+ depends_on('libxmu')
+
+ depends_on('xproto@7.0.17:')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ # make('check') # TODO: add package for cmdtest build dependency
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xbacklight/package.py b/var/spack/repos/builtin/packages/xbacklight/package.py
new file mode 100644
index 0000000000..f1a9ecc124
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xbacklight/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xbacklight(Package):
+ """Xbacklight is used to adjust the backlight brightness where supported.
+ It uses the RandR extension to find all outputs on the X server
+ supporting backlight brightness control and changes them all in the
+ same way."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xbacklight"
+ url = "https://www.x.org/archive/individual/app/xbacklight-1.2.1.tar.gz"
+
+ version('1.2.1', 'e8e4c86b0f867e23aa3532618a697609')
+
+ depends_on('libxcb')
+ depends_on('xcb-util')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xbiff/package.py b/var/spack/repos/builtin/packages/xbiff/package.py
new file mode 100644
index 0000000000..f5c53c5997
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xbiff/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xbiff(Package):
+ """xbiff provides graphical notification of new e-mail.
+ It only handles mail stored in a filesystem accessible file,
+ not via IMAP, POP or other remote access protocols."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xbiff"
+ url = "https://www.x.org/archive/individual/app/xbiff-1.0.3.tar.gz"
+
+ version('1.0.3', '779c888cb45da82a612e7f47971df9ab')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxext')
+ depends_on('libx11')
+
+ depends_on('xbitmaps', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xbitmaps/package.py b/var/spack/repos/builtin/packages/xbitmaps/package.py
new file mode 100644
index 0000000000..1c6fb79d3a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xbitmaps/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xbitmaps(Package):
+ """The xbitmaps package contains bitmap images used by multiple
+ applications built in Xorg."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/data/bitmaps/"
+ url = "https://www.x.org/archive/individual/data/xbitmaps-1.1.1.tar.gz"
+
+ version('1.1.1', '288bbe310db67280a9e2e5ebc5602595')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcalc/package.py b/var/spack/repos/builtin/packages/xcalc/package.py
new file mode 100644
index 0000000000..a470d1c9d0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcalc/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xcalc(Package):
+ """xcalc is a scientific calculator X11 client that can emulate a TI-30
+ or an HP-10C."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xcalc"
+ url = "https://www.x.org/archive/individual/app/xcalc-1.0.6.tar.gz"
+
+ version('1.0.6', 'a192ebb5e5f33925c71713501173d8e0')
+
+ depends_on('libxaw')
+ depends_on('libxt')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-demo/package.py b/var/spack/repos/builtin/packages/xcb-demo/package.py
new file mode 100644
index 0000000000..62433e3b32
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-demo/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbDemo(Package):
+ """xcb-demo: A collection of demo programs that use the XCB library."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-demo-0.1.tar.gz"
+
+ version('0.1', '803c5c91d54e734e6f6fa3f04f2463ff')
+
+ depends_on('libxcb')
+ depends_on('xcb-util')
+ depends_on('xcb-util-image')
+ depends_on('xcb-util-wm')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ # FIXME: crashes with the following error message
+ # X11/XCB/xcb.h: No such file or directory
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-proto/package.py b/var/spack/repos/builtin/packages/xcb-proto/package.py
index efcbdf0aea..d2ac54d34f 100644
--- a/var/spack/repos/builtin/packages/xcb-proto/package.py
+++ b/var/spack/repos/builtin/packages/xcb-proto/package.py
@@ -24,16 +24,21 @@
##############################################################################
from spack import *
+
class XcbProto(Package):
- """Protocol for libxcb"""
+ """xcb-proto provides the XML-XCB protocol descriptions that libxcb uses to
+ generate the majority of its code and API."""
homepage = "http://xcb.freedesktop.org/"
url = "http://xcb.freedesktop.org/dist/xcb-proto-1.11.tar.gz"
+ version('1.12', '5ee1ec124ea8d56bd9e83b8e9e0b84c4')
version('1.11', 'c8c6cb72c84f58270f4db1f39607f66a')
+ extends('python')
+
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ configure('--prefix={0}'.format(prefix))
- make()
- make("install")
+ # make('check') # fails xmllint validation
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-util-cursor/package.py b/var/spack/repos/builtin/packages/xcb-util-cursor/package.py
new file mode 100644
index 0000000000..b25fb181a6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-util-cursor/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbUtilCursor(Package):
+ """The XCB util modules provides a number of libraries which sit on top
+ of libxcb, the core X protocol library, and some of the extension
+ libraries. These experimental libraries provide convenience functions
+ and interfaces which make the raw X protocol more usable. Some of the
+ libraries also provide client-side code which is not strictly part of
+ the X protocol but which have traditionally been provided by Xlib."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-util-cursor-0.1.3.tar.gz"
+
+ version('0.1.3', '4b0768fa497127131a47f07e5c8cf745')
+
+ depends_on('libxcb@1.4:')
+ depends_on('xcb-util-renderutil')
+ depends_on('xcb-util-image')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-util-errors/package.py b/var/spack/repos/builtin/packages/xcb-util-errors/package.py
new file mode 100644
index 0000000000..c287a0ec6e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-util-errors/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbUtilErrors(Package):
+ """The XCB util modules provides a number of libraries which sit on top
+ of libxcb, the core X protocol library, and some of the extension
+ libraries. These experimental libraries provide convenience functions
+ and interfaces which make the raw X protocol more usable. Some of the
+ libraries also provide client-side code which is not strictly part of
+ the X protocol but which have traditionally been provided by Xlib."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-util-errors-1.0.tar.gz"
+
+ version('1.0', 'dc4a6ce073a81a0b7e614a2988f275cc')
+
+ depends_on('libxcb@1.4:')
+
+ depends_on('xcb-proto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-util-image/package.py b/var/spack/repos/builtin/packages/xcb-util-image/package.py
new file mode 100644
index 0000000000..4413c7e11d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-util-image/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbUtilImage(Package):
+ """The XCB util modules provides a number of libraries which sit on top
+ of libxcb, the core X protocol library, and some of the extension
+ libraries. These experimental libraries provide convenience functions
+ and interfaces which make the raw X protocol more usable. Some of the
+ libraries also provide client-side code which is not strictly part of
+ the X protocol but which have traditionally been provided by Xlib."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-util-image-0.4.0.tar.gz"
+
+ version('0.4.0', '32c9c2f72ebd58a2b2e210f27fee86f7')
+
+ depends_on('libxcb@1.4:')
+ depends_on('xcb-util')
+
+ depends_on('xproto@7.0.8:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('check')
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-util-keysyms/package.py b/var/spack/repos/builtin/packages/xcb-util-keysyms/package.py
new file mode 100644
index 0000000000..0de6391b18
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-util-keysyms/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbUtilKeysyms(Package):
+ """The XCB util modules provides a number of libraries which sit on top
+ of libxcb, the core X protocol library, and some of the extension
+ libraries. These experimental libraries provide convenience functions
+ and interfaces which make the raw X protocol more usable. Some of the
+ libraries also provide client-side code which is not strictly part of
+ the X protocol but which have traditionally been provided by Xlib."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-util-keysyms-0.4.0.tar.gz"
+
+ version('0.4.0', '2decde7b02b4b3bde99a02c17b64d5dc')
+
+ depends_on('libxcb@1.4:')
+
+ depends_on('xproto@7.0.8:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-util-renderutil/package.py b/var/spack/repos/builtin/packages/xcb-util-renderutil/package.py
new file mode 100644
index 0000000000..d41c88206c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-util-renderutil/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbUtilRenderutil(Package):
+ """The XCB util modules provides a number of libraries which sit on top
+ of libxcb, the core X protocol library, and some of the extension
+ libraries. These experimental libraries provide convenience functions
+ and interfaces which make the raw X protocol more usable. Some of the
+ libraries also provide client-side code which is not strictly part of
+ the X protocol but which have traditionally been provided by Xlib."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-util-renderutil-0.3.9.tar.gz"
+
+ version('0.3.9', 'ac18c1b70ae69845e112f1d987926436')
+
+ depends_on('libxcb@1.4:')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-util-wm/package.py b/var/spack/repos/builtin/packages/xcb-util-wm/package.py
new file mode 100644
index 0000000000..ef3db06aec
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-util-wm/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbUtilWm(Package):
+ """The XCB util modules provides a number of libraries which sit on top
+ of libxcb, the core X protocol library, and some of the extension
+ libraries. These experimental libraries provide convenience functions
+ and interfaces which make the raw X protocol more usable. Some of the
+ libraries also provide client-side code which is not strictly part of
+ the X protocol but which have traditionally been provided by Xlib."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-util-wm-0.4.1.tar.gz"
+
+ version('0.4.1', '0831399918359bf82930124fa9fd6a9b')
+
+ depends_on('libxcb@1.4:')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcb-util/package.py b/var/spack/repos/builtin/packages/xcb-util/package.py
new file mode 100644
index 0000000000..820592a319
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcb-util/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcbUtil(Package):
+ """The XCB util modules provides a number of libraries which sit on top
+ of libxcb, the core X protocol library, and some of the extension
+ libraries. These experimental libraries provide convenience functions
+ and interfaces which make the raw X protocol more usable. Some of the
+ libraries also provide client-side code which is not strictly part of
+ the X protocol but which have traditionally been provided by Xlib."""
+
+ homepage = "https://xcb.freedesktop.org/"
+ url = "https://xcb.freedesktop.org/dist/xcb-util-0.4.0.tar.gz"
+
+ version('0.4.0', '157d82738aa89934b6adaf3ca508a0f5')
+
+ depends_on('libxcb@1.4:')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xclipboard/package.py b/var/spack/repos/builtin/packages/xclipboard/package.py
new file mode 100644
index 0000000000..d9af19da71
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xclipboard/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xclipboard(Package):
+ """xclipboard is used to collect and display text selections that are
+ sent to the CLIPBOARD by other clients. It is typically used to save
+ CLIPBOARD selections for later use. It stores each CLIPBOARD
+ selection as a separate string, each of which can be selected."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xclipboard"
+ url = "https://www.x.org/archive/individual/app/xclipboard-1.1.3.tar.gz"
+
+ version('1.1.3', 'cee91df9df1b5d63034681546fd78c0b')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt@1.1:')
+ depends_on('libx11')
+ depends_on('libxkbfile')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xclock/package.py b/var/spack/repos/builtin/packages/xclock/package.py
new file mode 100644
index 0000000000..5bd38826db
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xclock/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xclock(Package):
+ """xclock is the classic X Window System clock utility. It displays
+ the time in analog or digital form, continuously updated at a
+ frequency which may be specified by the user."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xclock"
+ url = "https://www.x.org/archive/individual/app/xclock-1.0.7.tar.gz"
+
+ version('1.0.7', 'bbade10e6234d8db276212014e8c77fa')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libx11')
+ depends_on('libxrender')
+ depends_on('libxft')
+ depends_on('libxkbfile')
+ depends_on('libxt')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcmiscproto/package.py b/var/spack/repos/builtin/packages/xcmiscproto/package.py
new file mode 100644
index 0000000000..2b15d1b3e7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcmiscproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xcmiscproto(Package):
+ """XC-MISC Extension.
+
+ This extension defines a protocol that provides Xlib two ways to query
+ the server for available resource IDs."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/xcmiscproto"
+ url = "https://www.x.org/archive/individual/proto/xcmiscproto-1.2.2.tar.gz"
+
+ version('1.2.2', 'ded6cd23fb2800df93ebf2b3f3b01119')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcmsdb/package.py b/var/spack/repos/builtin/packages/xcmsdb/package.py
new file mode 100644
index 0000000000..4d12e3a843
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcmsdb/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xcmsdb(Package):
+ """xcmsdb is used to load, query, or remove Device Color Characterization
+ data stored in properties on the root window of the screen as
+ specified in section 7, Device Color Characterization, of the
+ X11 Inter-Client Communication Conventions Manual (ICCCM)."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xcmsdb"
+ url = "https://www.x.org/archive/individual/app/xcmsdb-1.0.5.tar.gz"
+
+ version('1.0.5', 'e7b1699c831b44d7005bff45977ed56a')
+
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcompmgr/package.py b/var/spack/repos/builtin/packages/xcompmgr/package.py
new file mode 100644
index 0000000000..fc5bbb4b9c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcompmgr/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xcompmgr(Package):
+ """xcompmgr is a sample compositing manager for X servers supporting the
+ XFIXES, DAMAGE, RENDER, and COMPOSITE extensions. It enables basic
+ eye-candy effects."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xcompmgr"
+ url = "https://www.x.org/archive/individual/app/xcompmgr-1.1.7.tar.gz"
+
+ version('1.1.7', '4992895c8934bbc99bb2447dfe5081f2')
+
+ depends_on('libxcomposite')
+ depends_on('libxfixes')
+ depends_on('libxdamage')
+ depends_on('libxrender')
+ depends_on('libxext')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xconsole/package.py b/var/spack/repos/builtin/packages/xconsole/package.py
new file mode 100644
index 0000000000..f86fe753b6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xconsole/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xconsole(Package):
+ """xconsole displays in a X11 window the messages which are usually sent
+ to /dev/console."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xconsole"
+ url = "https://www.x.org/archive/individual/app/xconsole-1.0.6.tar.gz"
+
+ version('1.0.6', '46cb988e31a0cf9a02c2bbc4a82bd572')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt@1.0:')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xcursor-themes/package.py b/var/spack/repos/builtin/packages/xcursor-themes/package.py
new file mode 100644
index 0000000000..7c38c9999c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcursor-themes/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XcursorThemes(Package):
+ """This is a default set of cursor themes for use with libXcursor,
+ originally created for the XFree86 Project, and now shipped as part
+ of the X.Org software distribution."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/data/cursors"
+ url = "https://www.x.org/archive/individual/data/xcursor-themes-1.0.4.tar.gz"
+
+ version('1.0.4', 'c82628f35e9950ba225050ad5803b92a')
+
+ depends_on('libxcursor')
+
+ depends_on('xcursorgen', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
+
+ # `make install` copies the files to the libxcursor installation.
+ # Create a fake directory to convince Spack that we actually
+ # installed something.
+ mkdir(prefix.lib)
diff --git a/var/spack/repos/builtin/packages/xcursorgen/package.py b/var/spack/repos/builtin/packages/xcursorgen/package.py
new file mode 100644
index 0000000000..4e43844646
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xcursorgen/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xcursorgen(Package):
+ """xcursorgen prepares X11 cursor sets for use with libXcursor."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xcursorgen"
+ url = "https://www.x.org/archive/individual/app/xcursorgen-1.0.6.tar.gz"
+
+ version('1.0.6', '669df84fc30d89c12ce64b95aba26677')
+
+ depends_on('libx11')
+ depends_on('libxcursor')
+ depends_on('libpng@1.2.0:')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xdbedizzy/package.py b/var/spack/repos/builtin/packages/xdbedizzy/package.py
new file mode 100644
index 0000000000..7a8f97401f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xdbedizzy/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xdbedizzy(Package):
+ """xdbedizzy is a demo of the X11 Double Buffer Extension (DBE)
+    creating a double-buffered spinning scene."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xdbedizzy"
+ url = "https://www.x.org/archive/individual/app/xdbedizzy-1.1.0.tar.gz"
+
+ version('1.1.0', '969be2f6bc62455431ab027f99720dc3')
+
+ depends_on('libx11')
+ depends_on('libxext')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xditview/package.py b/var/spack/repos/builtin/packages/xditview/package.py
new file mode 100644
index 0000000000..3fececd12e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xditview/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xditview(Package):
+ """xditview displays ditroff output on an X display."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xditview"
+ url = "https://www.x.org/archive/individual/app/xditview-1.0.4.tar.gz"
+
+ version('1.0.4', '46dffbbc4de3039fdecabb73d10d6a4f')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xdm/package.py b/var/spack/repos/builtin/packages/xdm/package.py
new file mode 100644
index 0000000000..d42ced9a57
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xdm/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xdm(Package):
+ """X Display Manager / XDMCP server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xdm"
+ url = "https://www.x.org/archive/individual/app/xdm-1.1.11.tar.gz"
+
+ version('1.1.11', 'aaf8c3d05d4a1e689d2d789c99a6023c')
+
+ depends_on('libxmu')
+ depends_on('libx11')
+ depends_on('libxau')
+ depends_on('libxinerama')
+ depends_on('libxft')
+ depends_on('libxpm')
+ depends_on('libxaw')
+ depends_on('libxdmcp')
+ depends_on('libxt')
+ depends_on('libxext')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xdpyinfo/package.py b/var/spack/repos/builtin/packages/xdpyinfo/package.py
new file mode 100644
index 0000000000..c69af3b357
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xdpyinfo/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xdpyinfo(Package):
+ """xdpyinfo is a utility for displaying information about an X server.
+
+ It is used to examine the capabilities of a server, the predefined
+ values for various parameters used in communicating between clients
+ and the server, and the different types of screens, visuals, and X11
+ protocol extensions that are available."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xdpyinfo"
+ url = "https://www.x.org/archive/individual/app/xdpyinfo-1.3.2.tar.gz"
+
+ version('1.3.2', 'dab410719d36c9df690cf3a8cd7d117e')
+
+ depends_on('libxext')
+ depends_on('libx11')
+ depends_on('libxtst')
+ depends_on('libxcb')
+
+ depends_on('xproto@7.0.22:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xdriinfo/package.py b/var/spack/repos/builtin/packages/xdriinfo/package.py
new file mode 100644
index 0000000000..7548175f6f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xdriinfo/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xdriinfo(Package):
+ """xdriinfo - query configuration information of X11 DRI drivers."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xdriinfo"
+ url = "https://www.x.org/archive/individual/app/xdriinfo-1.0.5.tar.gz"
+
+ version('1.0.5', '34a4a9ae69c60f4c2566bf9ea4bcf311')
+
+ depends_on('libx11')
+ depends_on('expat')
+ depends_on('libxshmfence')
+ depends_on('libxext')
+ depends_on('libxdamage')
+ depends_on('libxfixes')
+ depends_on('pcre')
+
+ depends_on('glproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xedit/package.py b/var/spack/repos/builtin/packages/xedit/package.py
new file mode 100644
index 0000000000..da5f28809f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xedit/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xedit(Package):
+ """Xedit is a simple text editor for X."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/app/xedit"
+ url = "https://www.x.org/archive/individual/app/xedit-1.2.2.tar.gz"
+
+ version('1.2.2', '9fb9d6f63b574e5a4937384fbe6579c1')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt@1.0:')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xerces-c/package.py b/var/spack/repos/builtin/packages/xerces-c/package.py
index 2efccc3c08..9f3ad8a4dc 100644
--- a/var/spack/repos/builtin/packages/xerces-c/package.py
+++ b/var/spack/repos/builtin/packages/xerces-c/package.py
@@ -24,16 +24,18 @@
##############################################################################
from spack import *
+
class XercesC(Package):
-    """ Xerces-C++ is a validating XML parser written in a portable subset of C++.
-    Xerces-C++ makes it easy to give your application the ability to read and
-    write XML data. A shared library is provided for parsing, generating,
-    manipulating, and validating XML documents using the DOM, SAX, and SAX2 APIs.
+    """Xerces-C++ is a validating XML parser written in a portable subset of
+    C++. Xerces-C++ makes it easy to give your application the ability to read
+    and write XML data. A shared library is provided for parsing, generating,
+    manipulating, and validating XML documents using the DOM, SAX, and SAX2
+    APIs.
     """
     homepage = "https://xerces.apache.org/xerces-c"
-    url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.3.tar.bz2"
-    version('3.1.3', '5e333b55cb43e6b025ddf0e5d0f0fb0d')
+    url = "https://www.apache.org/dist/xerces/c/3/sources/xerces-c-3.1.4.tar.bz2"
+    version('3.1.4', 'd04ae9d8b2dee2157c6db95fa908abfd')
     def install(self, spec, prefix):
         configure("--prefix=%s" % prefix,
@@ -41,4 +43,3 @@ class XercesC(Package):
         make("clean")
         make()
         make("install")
-
diff --git a/var/spack/repos/builtin/packages/xev/package.py b/var/spack/repos/builtin/packages/xev/package.py
new file mode 100644
index 0000000000..5727d4e428
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xev/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xev(Package):
+ """xev creates a window and then asks the X server to send it X11 events
+ whenever anything happens to the window (such as it being moved,
+ resized, typed in, clicked in, etc.). You can also attach it to an
+ existing window. It is useful for seeing what causes events to occur
+    and for displaying the information that they contain; it is essentially a
+ debugging and development tool, and should not be needed in normal
+ usage."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xev"
+ url = "https://www.x.org/archive/individual/app/xev-1.2.2.tar.gz"
+
+ version('1.2.2', 'fdb374f77cdad8e104b989a0148c4c1f')
+
+ depends_on('libxrandr@1.2:')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xextproto/package.py b/var/spack/repos/builtin/packages/xextproto/package.py
new file mode 100644
index 0000000000..9c1c123527
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xextproto/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xextproto(Package):
+ """X Protocol Extensions."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/xextproto"
+ url = "https://www.x.org/archive/individual/proto/xextproto-7.3.0.tar.gz"
+
+ version('7.3.0', '37b700baa8c8ea7964702d948dd13821')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ parallel = False
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xeyes/package.py b/var/spack/repos/builtin/packages/xeyes/package.py
new file mode 100644
index 0000000000..cfea92fda3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xeyes/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xeyes(Package):
+ """xeyes - a follow the mouse X demo, using the X SHAPE extension"""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xeyes"
+ url = "https://www.x.org/archive/individual/app/xeyes-1.1.1.tar.gz"
+
+ version('1.1.1', '2c0522bce5c61bbe784d2b3491998d31')
+
+ depends_on('libx11')
+ depends_on('libxt')
+ depends_on('libxext')
+ depends_on('libxmu')
+ depends_on('libxrender@0.4:')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xf86bigfontproto/package.py b/var/spack/repos/builtin/packages/xf86bigfontproto/package.py
new file mode 100644
index 0000000000..6c1dc8f37d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xf86bigfontproto/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xf86bigfontproto(Package):
+ """X.org XF86BigFontProto protocol headers."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/xf86bigfontproto"
+ url = "https://www.x.org/archive/individual/proto/xf86bigfontproto-1.2.0.tar.gz"
+
+ version('1.2.0', '91b0733ff4cbe55808d96073258aa3d1')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xf86dga/package.py b/var/spack/repos/builtin/packages/xf86dga/package.py
new file mode 100644
index 0000000000..8add6fbca7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xf86dga/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xf86dga(Package):
+ """dga is a simple test client for the XFree86-DGA extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xf86dga"
+ url = "https://www.x.org/archive/individual/app/xf86dga-1.0.3.tar.gz"
+
+ version('1.0.3', '3b87bb916c9df68cf5e4e969307b25b5')
+
+ depends_on('libx11')
+ depends_on('libxxf86dga@1.1:')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xf86dgaproto/package.py b/var/spack/repos/builtin/packages/xf86dgaproto/package.py
new file mode 100644
index 0000000000..05b64c9534
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xf86dgaproto/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xf86dgaproto(Package):
+ """X.org XF86DGAProto protocol headers."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/xf86dgaproto"
+ url = "https://www.x.org/archive/individual/proto/xf86dgaproto-2.1.tar.gz"
+
+ version('2.1', '1fe79dc07857ad3e1fb8b8f2bdd70d1b')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xf86driproto/package.py b/var/spack/repos/builtin/packages/xf86driproto/package.py
new file mode 100644
index 0000000000..655d1bc60b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xf86driproto/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xf86driproto(Package):
+ """XFree86 Direct Rendering Infrastructure Extension.
+
+ This extension defines a protocol to allow user applications to access
+ the video hardware without requiring data to be passed through the X
+ server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/xf86driproto"
+ url = "https://www.x.org/archive/individual/proto/xf86driproto-2.1.1.tar.gz"
+
+ version('2.1.1', '3ba16a48d8d9f9f746f9bd281ba8fb3f')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xf86miscproto/package.py b/var/spack/repos/builtin/packages/xf86miscproto/package.py
new file mode 100644
index 0000000000..4b7e279077
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xf86miscproto/package.py
@@ -0,0 +1,42 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xf86miscproto(Package):
+ """This package includes the protocol definitions of the "XFree86-Misc"
+ extension to the X11 protocol. The "XFree86-Misc" extension is
+ supported by the XFree86 X server and versions of the Xorg X server
+ prior to Xorg 1.6."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/xf86miscproto"
+ url = "https://www.x.org/archive/individual/proto/xf86miscproto-0.9.3.tar.gz"
+
+ version('0.9.3', 'c6432f04f84929c94fa05b3a466c489d')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xf86rushproto/package.py b/var/spack/repos/builtin/packages/xf86rushproto/package.py
new file mode 100644
index 0000000000..bdd192970b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xf86rushproto/package.py
@@ -0,0 +1,39 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xf86rushproto(Package):
+ """X.org XF86RushProto protocol headers."""
+
+ homepage = "https://cgit.freedesktop.org/xorg/proto/xf86rushproto"
+ url = "https://www.x.org/archive/individual/proto/xf86rushproto-1.1.2.tar.gz"
+
+ version('1.1.2', '6a6389473332ace01146cccfef228576')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xf86vidmodeproto/package.py b/var/spack/repos/builtin/packages/xf86vidmodeproto/package.py
new file mode 100644
index 0000000000..ece389f9e6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xf86vidmodeproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xf86vidmodeproto(Package):
+ """XFree86 Video Mode Extension.
+
+ This extension defines a protocol for dynamically configuring modelines
+ and gamma."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/xf86vidmodeproto"
+ url = "https://www.x.org/archive/individual/proto/xf86vidmodeproto-2.3.1.tar.gz"
+
+ version('2.3.1', '99016d0fe355bae0bb23ce00fb4d4a2c')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xfd/package.py b/var/spack/repos/builtin/packages/xfd/package.py
new file mode 100644
index 0000000000..669cd83bf5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xfd/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xfd(Package):
+ """xfd - display all the characters in a font using either the
+ X11 core protocol or libXft2."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xfd"
+ url = "https://www.x.org/archive/individual/app/xfd-1.1.2.tar.gz"
+
+ version('1.1.2', '12fe8f7c3e71352bf22124ad56d4ceaf')
+
+ depends_on('libxaw')
+ depends_on('fontconfig')
+ depends_on('libxft')
+ depends_on('libxrender')
+ depends_on('libxmu')
+ depends_on('libxt')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xfindproxy/package.py b/var/spack/repos/builtin/packages/xfindproxy/package.py
new file mode 100644
index 0000000000..e4b83433c4
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xfindproxy/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xfindproxy(Package):
+ """xfindproxy is used to locate available X11 proxy services.
+
+ It utilizes the Proxy Management Protocol to communicate with a proxy
+ manager. The proxy manager keeps track of all available proxy
+ services, starts new proxies when necessary, and makes sure that
+ proxies are shared whenever possible."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xfindproxy"
+ url = "https://www.x.org/archive/individual/app/xfindproxy-1.0.4.tar.gz"
+
+ version('1.0.4', 'd0a7b53ae5827b342bccd3ebc7ec672f')
+
+ depends_on('libice')
+ depends_on('libxt')
+
+ depends_on('xproto', type='build')
+ depends_on('xproxymanagementprotocol', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xfontsel/package.py b/var/spack/repos/builtin/packages/xfontsel/package.py
new file mode 100644
index 0000000000..772ff8d570
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xfontsel/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xfontsel(Package):
+ """xfontsel application provides a simple way to display the X11 core
+ protocol fonts known to your X server, examine samples of each, and
+ retrieve the X Logical Font Description ("XLFD") full name for a font."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xfontsel"
+ url = "https://www.x.org/archive/individual/app/xfontsel-1.0.5.tar.gz"
+
+ version('1.0.5', '72a35e7fa786eb2b0194d75eeb4a02e3')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xfs/package.py b/var/spack/repos/builtin/packages/xfs/package.py
new file mode 100644
index 0000000000..72429dee90
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xfs/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xfs(Package):
+ """X Font Server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xfs"
+ url = "https://www.x.org/archive/individual/app/xfs-1.1.4.tar.gz"
+
+ version('1.1.4', '0818a2e0317e0f0a1e8a15ca811827e2')
+
+ depends_on('libxfont@1.4.5:')
+ depends_on('font-util')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('xtrans', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xfsinfo/package.py b/var/spack/repos/builtin/packages/xfsinfo/package.py
new file mode 100644
index 0000000000..b31ad1c1e0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xfsinfo/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xfsinfo(Package):
+ """xfsinfo is a utility for displaying information about an X font
+ server. It is used to examine the capabilities of a server, the
+ predefined values for various parameters used in communicating between
+ clients and the server, and the font catalogues and alternate servers
+ that are available."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xfsinfo"
+ url = "https://www.x.org/archive/individual/app/xfsinfo-1.0.5.tar.gz"
+
+ version('1.0.5', '36b64a3f37b87c759c5d17634e129fb9')
+
+ depends_on('libfs')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xfwp/package.py b/var/spack/repos/builtin/packages/xfwp/package.py
new file mode 100644
index 0000000000..5f073a8806
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xfwp/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xfwp(Package):
+ """xfwp proxies X11 protocol connections, such as through a firewall."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xfwp"
+ url = "https://www.x.org/archive/individual/app/xfwp-1.0.3.tar.gz"
+
+ version('1.0.3', 'e23cc01894ae57e5959ca6a56d0f4f94')
+
+ depends_on('libice')
+
+ depends_on('xproto', type='build')
+ depends_on('xproxymanagementprotocol', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ # FIXME: fails with the error message:
+ # io.c:1039:7: error: implicit declaration of function 'swab'
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xgamma/package.py b/var/spack/repos/builtin/packages/xgamma/package.py
new file mode 100644
index 0000000000..b8abec293a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xgamma/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xgamma(Package):
+ """xgamma allows X users to query and alter the gamma correction of a
+ monitor via the X video mode extension (XFree86-VidModeExtension)."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xgamma"
+ url = "https://www.x.org/archive/individual/app/xgamma-1.0.6.tar.gz"
+
+ version('1.0.6', 'ac4f91bf1d9aa0433152ba6196288cc6')
+
+ depends_on('libx11')
+ depends_on('libxxf86vm')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xgc/package.py b/var/spack/repos/builtin/packages/xgc/package.py
new file mode 100644
index 0000000000..608e6e0360
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xgc/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xgc(Package):
+ """xgc is an X11 graphics demo that shows various features of the X11
+ core protocol graphics primitives."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xgc"
+ url = "https://www.x.org/archive/individual/app/xgc-1.0.5.tar.gz"
+
+ version('1.0.5', '605557a9c138f6dc848c87a21bc7c7fc')
+
+ depends_on('libxaw')
+ depends_on('libxt')
+
+ depends_on('flex', type='build')
+ depends_on('bison', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xhost/package.py b/var/spack/repos/builtin/packages/xhost/package.py
new file mode 100644
index 0000000000..f01c481ee7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xhost/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xhost(Package):
+ """xhost is used to manage the list of host names or user names
+ allowed to make connections to the X server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xhost"
+ url = "https://www.x.org/archive/individual/app/xhost-1.0.7.tar.gz"
+
+ version('1.0.7', 'de34b4ba5194634dbeb29a1f008f495a')
+
+ depends_on('libx11')
+ depends_on('libxmu')
+ depends_on('libxau')
+
+ depends_on('xproto@7.0.22:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xineramaproto/package.py b/var/spack/repos/builtin/packages/xineramaproto/package.py
new file mode 100644
index 0000000000..baededbb25
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xineramaproto/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xineramaproto(Package):
+ """X Xinerama Extension.
+
+ This is an X extension that allows multiple physical screens controlled
+ by a single X server to appear as a single screen."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/xineramaproto"
+ url = "https://www.x.org/archive/individual/proto/xineramaproto-1.2.1.tar.gz"
+
+ version('1.2.1', 'e0e148b11739e144a546b8a051b17dde')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xinit/package.py b/var/spack/repos/builtin/packages/xinit/package.py
new file mode 100644
index 0000000000..9f3bc09229
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xinit/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xinit(Package):
+ """The xinit program is used to start the X Window System server and a
+ first client program on systems that are not using a display manager
+ such as xdm."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xinit"
+ url = "https://www.x.org/archive/individual/app/xinit-1.3.4.tar.gz"
+
+ version('1.3.4', '91c5697345016ec7841f5e5fccbe7a4c')
+
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xinput/package.py b/var/spack/repos/builtin/packages/xinput/package.py
new file mode 100644
index 0000000000..3c4fb35503
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xinput/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xinput(Package):
+ """xinput is a utility to configure and test XInput devices."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xinput"
+ url = "https://www.x.org/archive/individual/app/xinput-1.6.2.tar.gz"
+
+ version('1.6.2', '6684f6015298d22936438173be3b7ef5')
+
+ depends_on('libx11')
+ depends_on('libxext')
+ depends_on('libxi@1.5.99.1:')
+ depends_on('libxrandr')
+ depends_on('libxinerama')
+
+ depends_on('inputproto@2.1.99.1:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xkbcomp/package.py b/var/spack/repos/builtin/packages/xkbcomp/package.py
new file mode 100644
index 0000000000..e6e8875fa7
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xkbcomp/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xkbcomp(Package):
+ """The X Keyboard (XKB) Extension essentially replaces the core protocol
+ definition of a keyboard. The extension makes it possible to specify
+ clearly and explicitly most aspects of keyboard behaviour on a per-key
+ basis, and to track more closely the logical and physical state of a
+ keyboard. It also includes a number of keyboard controls designed to
+ make keyboards more accessible to people with physical impairments."""
+
+ homepage = "https://www.x.org/wiki/XKB/"
+ url = "https://www.x.org/archive/individual/app/xkbcomp-1.3.1.tar.gz"
+
+ version('1.3.1', '9e8ca110ed40d4703f8f73d99bc81576')
+
+ depends_on('libx11')
+ depends_on('libxkbfile')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('bison', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xkbdata/package.py b/var/spack/repos/builtin/packages/xkbdata/package.py
new file mode 100644
index 0000000000..fc84631e36
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xkbdata/package.py
@@ -0,0 +1,43 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xkbdata(Package):
+ """The XKB data files for the various keyboard models, layouts,
+ and locales."""
+
+ homepage = "https://www.x.org/wiki/XKB/"
+ url = "https://www.x.org/archive/individual/data/xkbdata-1.0.1.tar.gz"
+
+ version('1.0.1', 'a7e0fbc9cc84c621243c777694388064')
+
+ depends_on('xkbcomp', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xkbevd/package.py b/var/spack/repos/builtin/packages/xkbevd/package.py
new file mode 100644
index 0000000000..462d989db2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xkbevd/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xkbevd(Package):
+ """XKB event daemon demo."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xkbevd"
+ url = "https://www.x.org/archive/individual/app/xkbevd-1.1.4.tar.gz"
+
+ version('1.1.4', '0e9e05761551b1e58bd541231f90ae87')
+
+ depends_on('libxkbfile')
+ depends_on('libx11')
+
+ depends_on('bison', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xkbprint/package.py b/var/spack/repos/builtin/packages/xkbprint/package.py
new file mode 100644
index 0000000000..dc92ac4126
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xkbprint/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xkbprint(Package):
+ """xkbprint generates a printable or encapsulated PostScript description
+ of an XKB keyboard description."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xkbprint"
+ url = "https://www.x.org/archive/individual/app/xkbprint-1.0.4.tar.gz"
+
+ version('1.0.4', '4dd9d4fdbdc08f70dc402da149e4d5d8')
+
+ depends_on('libxkbfile')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xkbutils/package.py b/var/spack/repos/builtin/packages/xkbutils/package.py
new file mode 100644
index 0000000000..a4c6c97578
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xkbutils/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xkbutils(Package):
+ """xkbutils is a collection of small utilities utilizing the XKeyboard
+ (XKB) extension to the X11 protocol."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xkbutils"
+ url = "https://www.x.org/archive/individual/app/xkbutils-1.0.4.tar.gz"
+
+ version('1.0.4', '6b898346b84e07c2f13b097193ca0413')
+
+ depends_on('libxaw')
+ depends_on('libxt')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('inputproto', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xkeyboard-config/package.py b/var/spack/repos/builtin/packages/xkeyboard-config/package.py
new file mode 100644
index 0000000000..3ad7ea197c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xkeyboard-config/package.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XkeyboardConfig(Package):
+ """This project provides a consistent, well-structured, frequently
+ released, open source database of keyboard configuration data. The
+ project is targeted to XKB-based systems."""
+
+ homepage = "https://www.freedesktop.org/wiki/Software/XKeyboardConfig/"
+ url = "https://www.x.org/archive/individual/data/xkeyboard-config/xkeyboard-config-2.18.tar.gz"
+
+ version('2.18', '96c43e04dbfbb1e6e6abd4678292062c')
+
+ depends_on('libx11@1.4.3:')
+
+ depends_on('libxslt', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('intltool@0.30:', type='build')
+ depends_on('xproto@7.0.20:', type='build')
+
+ # TODO: missing dependencies
+ # xgettext
+ # msgmerge
+ # msgfmt
+ # gmsgfmt
+ # perl@5.8.1:
+ # perl XML::Parser
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xkill/package.py b/var/spack/repos/builtin/packages/xkill/package.py
new file mode 100644
index 0000000000..061d756eec
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xkill/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xkill(Package):
+ """xkill is a utility for forcing the X server to close connections to
+ clients. This program is very dangerous, but is useful for aborting
+ programs that have displayed undesired windows on a user's screen."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xkill"
+ url = "https://www.x.org/archive/individual/app/xkill-1.0.4.tar.gz"
+
+ version('1.0.4', 'b04c15bfd0b619f1e4ff3e44607e738d')
+
+ depends_on('libx11')
+ depends_on('libxmu')
+
+ depends_on('xproto@7.0.22:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xload/package.py b/var/spack/repos/builtin/packages/xload/package.py
new file mode 100644
index 0000000000..2fc91043b5
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xload/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xload(Package):
+ """xload displays a periodically updating histogram of the
+ system load average."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xload"
+ url = "https://www.x.org/archive/individual/app/xload-1.1.2.tar.gz"
+
+ version('1.1.2', '0af9a68193849b16f8168f096682efb4')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xlogo/package.py b/var/spack/repos/builtin/packages/xlogo/package.py
new file mode 100644
index 0000000000..77f5bd3639
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xlogo/package.py
@@ -0,0 +1,53 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xlogo(Package):
+ """The xlogo program simply displays the X Window System logo."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xlogo"
+ url = "https://www.x.org/archive/individual/app/xlogo-1.0.4.tar.gz"
+
+ version('1.0.4', '4c4f82c196a55a90800a77906f4353fb')
+
+ depends_on('libsm')
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt@1.0:')
+ depends_on('libxext')
+ depends_on('libx11')
+ depends_on('libxft')
+ depends_on('libxrender')
+ depends_on('libxt')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xlsatoms/package.py b/var/spack/repos/builtin/packages/xlsatoms/package.py
new file mode 100644
index 0000000000..8722b57c8c
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xlsatoms/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xlsatoms(Package):
+ """xlsatoms lists the interned atoms defined on an X11 server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xlsatoms"
+ url = "https://www.x.org/archive/individual/app/xlsatoms-1.1.2.tar.gz"
+
+ version('1.1.2', '1f32e2b8c2135b5867291517848cb396')
+
+ depends_on('libxcb', when='@1.1:')
+ depends_on('libx11', when='@:1.0')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
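
xlsatoms (and xlsclients just below) illustrate Spack's version-conditional dependencies: releases from 1.1 onward talk to the server through libxcb, while the 1.0 series still links against Xlib. A minimal sketch of the general form, using hypothetical package names and version ranges purely for illustration:

    from spack import *

    class Example(Package):   # hypothetical package, for illustration only
        """Sketch of version-conditional dependencies."""
        depends_on('libfoo', when='@2.0:')   # 2.0 and newer use libfoo
        depends_on('libbar', when='@:1.9')   # 1.9 and older use libbar
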
diff --git a/var/spack/repos/builtin/packages/xlsclients/package.py b/var/spack/repos/builtin/packages/xlsclients/package.py
new file mode 100644
index 0000000000..3714de2706
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xlsclients/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xlsclients(Package):
+ """xlsclients is a utility for listing information about the client
+ applications running on an X11 server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xlsclients"
+ url = "https://www.x.org/archive/individual/app/xlsclients-1.1.3.tar.gz"
+
+ version('1.1.3', '093c748d98b61dbddcaf3de1740fbd26')
+
+ depends_on('libxcb@1.6:', when='@1.1:')
+ depends_on('libx11', when='@:1.0')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xlsfonts/package.py b/var/spack/repos/builtin/packages/xlsfonts/package.py
new file mode 100644
index 0000000000..011ca5aa0e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xlsfonts/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xlsfonts(Package):
+ """xlsfonts lists fonts available from an X server via the X11
+ core protocol."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xlsfonts"
+ url = "https://www.x.org/archive/individual/app/xlsfonts-1.0.5.tar.gz"
+
+ version('1.0.5', '074cc44e5238c6a501523ef06caba517')
+
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xmag/package.py b/var/spack/repos/builtin/packages/xmag/package.py
new file mode 100644
index 0000000000..27843299c8
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xmag/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xmag(Package):
+ """xmag displays a magnified snapshot of a portion of an X11 screen."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xmag"
+ url = "https://www.x.org/archive/individual/app/xmag-1.0.6.tar.gz"
+
+ version('1.0.6', '2827ae4b293535623b9f7b659c506dcd')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xman/package.py b/var/spack/repos/builtin/packages/xman/package.py
new file mode 100644
index 0000000000..629a457edc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xman/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xman(Package):
+ """xman is a graphical manual page browser using the Athena Widgets (Xaw)
+ toolkit."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xman"
+ url = "https://www.x.org/archive/individual/app/xman-1.1.4.tar.gz"
+
+ version('1.1.4', 'f4238c79ee7227ea193898fc159f31e5')
+
+ depends_on('libxaw')
+ depends_on('libxt')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xmessage/package.py b/var/spack/repos/builtin/packages/xmessage/package.py
new file mode 100644
index 0000000000..9b2ee5102e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xmessage/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xmessage(Package):
+ """xmessage displays a message or query in a window. The user can click
+ on an "okay" button to dismiss it or can select one of several buttons
+ to answer a question. xmessage can also exit after a specified time."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xmessage"
+ url = "https://www.x.org/archive/individual/app/xmessage-1.0.4.tar.gz"
+
+ version('1.0.4', '69df5761fbec14c782948065a6f36028')
+
+ depends_on('libxaw')
+ depends_on('libxt')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xmh/package.py b/var/spack/repos/builtin/packages/xmh/package.py
new file mode 100644
index 0000000000..cdde63e149
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xmh/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xmh(Package):
+ """The xmh program provides a graphical user interface to the
+ MH Message Handling System. To actually do things with your
+ mail, it makes calls to the MH package."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xmh"
+ url = "https://www.x.org/archive/individual/app/xmh-1.0.3.tar.gz"
+
+ version('1.0.3', '7547c5a5ab7309a1b10e8ecf48e60105')
+
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libxt')
+ depends_on('libx11')
+
+ depends_on('xbitmaps@1.1.0:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xmlto/package.py b/var/spack/repos/builtin/packages/xmlto/package.py
new file mode 100644
index 0000000000..0dbc81de32
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xmlto/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xmlto(Package):
+ """Utility xmlto is a simple shell script for converting XML files to various
+ formats. It serves as easy to use command line frontend to make fine output
+ without remembering many long options and searching for the syntax of the
+ backends."""
+
+ homepage = "http://cyberelk.net/tim/software/xmlto/"
+ url = "https://fedorahosted.org/releases/x/m/xmlto/xmlto-0.0.28.tar.gz"
+
+ version('0.0.28', 'a1fefad9d83499a15576768f60f847c6')
+
+ # FIXME: missing a lot of dependencies
+ depends_on('libxslt')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
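
The FIXME above flags missing dependencies: besides libxslt, xmlto calls out to an XML validator and the DocBook stylesheets at run time. A hedged sketch of what the missing declarations might look like, assuming the repository provides packages under these names (not verified here):

    # Sketch only -- package names are assumptions, not taken from this patch.
    depends_on('libxml2', type='run')      # xmllint, used for validation
    depends_on('docbook-xml', type='run')  # DocBook DTDs
    depends_on('docbook-xsl', type='run')  # DocBook XSL stylesheets
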
diff --git a/var/spack/repos/builtin/packages/xmodmap/package.py b/var/spack/repos/builtin/packages/xmodmap/package.py
new file mode 100644
index 0000000000..abe40dbb43
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xmodmap/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xmodmap(Package):
+ """The xmodmap program is used to edit and display the keyboard modifier
+ map and keymap table that are used by client applications to convert
+ event keycodes into keysyms. It is usually run from the user's
+ session startup script to configure the keyboard according to personal
+ tastes."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xmodmap"
+ url = "https://www.x.org/archive/individual/app/xmodmap-1.0.9.tar.gz"
+
+ version('1.0.9', '771cf86bcdc3589e7add2e761f675099')
+
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.25:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xmore/package.py b/var/spack/repos/builtin/packages/xmore/package.py
new file mode 100644
index 0000000000..8f8ccd7138
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xmore/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xmore(Package):
+ """xmore - plain text display program for the X Window System."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xmore"
+ url = "https://www.x.org/archive/individual/app/xmore-1.0.2.tar.gz"
+
+ version('1.0.2', '40b1850494f8af0939a1989c399efa11')
+
+ depends_on('libxaw')
+ depends_on('libxt')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xorg-cf-files/package.py b/var/spack/repos/builtin/packages/xorg-cf-files/package.py
new file mode 100644
index 0000000000..c7b22d904f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xorg-cf-files/package.py
@@ -0,0 +1,44 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XorgCfFiles(Package):
+ """The xorg-cf-files package contains the data files for the imake utility,
+ defining the known settings for a wide variety of platforms (many of which
+ have not been verified or tested in over a decade), and for many of the
+ libraries formerly delivered in the X.Org monolithic releases."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/util/cf"
+ url = "https://www.x.org/archive/individual/util/xorg-cf-files-1.0.6.tar.gz"
+
+ version('1.0.6', 'c0ce98377c70d95fb48e1bd856109bf8')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xorg-docs/package.py b/var/spack/repos/builtin/packages/xorg-docs/package.py
new file mode 100644
index 0000000000..5c320bba7b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xorg-docs/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XorgDocs(Package):
+ """This package provides miscellaneous documentation for the X Window
+ System that doesn't fit better into other packages.
+
+ The preferred documentation format for these documents is DocBook XML."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/doc/xorg-docs"
+ url = "https://www.x.org/archive/individual/doc/xorg-docs-1.7.1.tar.gz"
+
+ version('1.7.1', 'ca689ccbf8ebc362afbe5cc5792a4abd')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+ depends_on('xorg-sgml-doctools@1.8:', type='build')
+ depends_on('xmlto', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xorg-gtest/package.py b/var/spack/repos/builtin/packages/xorg-gtest/package.py
new file mode 100644
index 0000000000..6978d610d3
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xorg-gtest/package.py
@@ -0,0 +1,51 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XorgGtest(Package):
+ """Provides a Google Test environment for starting and stopping
+ an X server for testing purposes."""
+
+ homepage = "https://people.freedesktop.org/~cndougla/xorg-gtest/"
+ url = "https://www.x.org/archive/individual/test/xorg-gtest-0.7.1.tar.bz2"
+
+ version('0.7.1', '31beb4d7d2b4eba7f9265fa0cb4c6428')
+
+ depends_on('libx11')
+ depends_on('libxi')
+ depends_on('xorg-server')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ # TODO: may be missing evemu package?
+ # TODO: what is the difference between xorg-gtest and googletest packages?
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xorg-server/package.py b/var/spack/repos/builtin/packages/xorg-server/package.py
new file mode 100644
index 0000000000..9d9b49f9c2
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xorg-server/package.py
@@ -0,0 +1,108 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XorgServer(Package):
+ """X.Org Server is the free and open source implementation of the display
+ server for the X Window System stewarded by the X.Org Foundation."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/xserver"
+ url = "https://www.x.org/archive/individual/xserver/xorg-server-1.18.99.901.tar.gz"
+
+ version('1.18.99.901', 'd0242b95991c221c4fcc0d283aba7a42')
+
+ depends_on('pixman@0.27.2:')
+ depends_on('font-util')
+ depends_on('libxshmfence@1.1:')
+ depends_on('libdrm@2.3.0:')
+ depends_on('libx11')
+ # depends_on('gl@9.2.0:')
+
+ depends_on('dri2proto@2.8:', type='build')
+ depends_on('dri3proto@1.0:', type='build')
+ depends_on('glproto@1.4.17:', type='build')
+
+ depends_on('flex', type='build')
+ depends_on('bison', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ # TODO: add missing dependencies
+ # $LIBSELINUX $REQUIRED_MODULES $REQUIRED_LIBS
+ # $LIBPCIACCESS $DGAPROTO $XORG_MODULES epoxy xdmcp xau xfixes x11-xcb
+ # xcb-aux xcb-image xcb-ewmh xcb-icccm $WINDOWSWMPROTO windowsdriproto
+ # khronos-opengl-registry
+ # $APPLEWMPROTO $LIBAPPLEWM xfixes $LIBDMX $LIBXEXT $LIBDMX xmu $LIBXEXT
+ # $LIBDMX $LIBXI $LIBXEXT $LIBXTST $LIBXEXT xres $LIBXEXT $LIBXEXT
+ # $XEPHYR_REQUIRED_LIBS
+
+ # VIDEOPROTO="videoproto"
+ # COMPOSITEPROTO="compositeproto >= 0.4"
+ # RECORDPROTO="recordproto >= 1.13.99.1"
+ # SCRNSAVERPROTO="scrnsaverproto >= 1.1"
+ # RESOURCEPROTO="resourceproto >= 1.2.0"
+ # DRIPROTO="xf86driproto >= 2.1.0"
+ # XINERAMAPROTO="xineramaproto"
+ # BIGFONTPROTO="xf86bigfontproto >= 1.2.0"
+ # DGAPROTO="xf86dgaproto >= 2.0.99.1"
+ # DMXPROTO="dmxproto >= 2.2.99.1"
+ # VIDMODEPROTO="xf86vidmodeproto >= 2.2.99.1"
+ # WINDOWSWMPROTO="windowswmproto"
+ # APPLEWMPROTO="applewmproto >= 1.4"
+
+ # XPROTO="xproto >= 7.0.28"
+ # RANDRPROTO="randrproto >= 1.5.0"
+ # RENDERPROTO="renderproto >= 0.11"
+ # XEXTPROTO="xextproto >= 7.2.99.901"
+ # INPUTPROTO="inputproto >= 2.3"
+ # KBPROTO="kbproto >= 1.0.3"
+ # FONTSPROTO="fontsproto >= 2.1.3"
+ # FIXESPROTO="fixesproto >= 5.0"
+ # DAMAGEPROTO="damageproto >= 1.1"
+ # XCMISCPROTO="xcmiscproto >= 1.2.0"
+ # BIGREQSPROTO="bigreqsproto >= 1.1.0"
+ # XTRANS="xtrans >= 1.3.5"
+ # PRESENTPROTO="presentproto >= 1.0"
+
+ # LIBAPPLEWM="applewm >= 1.4"
+ # LIBDMX="dmx >= 1.0.99.1"
+ # LIBDRI="dri >= 7.8.0"
+ # LIBEGL="egl"
+ # LIBGBM="gbm >= 10.2.0"
+ # LIBXEXT="xext >= 1.0.99.4"
+ # LIBXFONT="xfont2 >= 2.0.0"
+ # LIBXI="xi >= 1.2.99.1"
+ # LIBXTST="xtst >= 1.0.99.2"
+ # LIBPCIACCESS="pciaccess >= 0.12.901"
+ # LIBUDEV="libudev >= 143"
+ # LIBSELINUX="libselinux >= 2.0.86"
+ # LIBDBUS="dbus-1 >= 1.0"
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
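
The long comment block in XorgServer records the version checks carried over from the server's configure script. As an illustration of how a few of them would translate into Spack directives once the corresponding packages exist, here is a minimal sketch (package names and version spellings are assumptions, not part of this patch):

    # Protocol headers checked at configure time (build-only dependencies).
    depends_on('videoproto', type='build')              # VIDEOPROTO="videoproto"
    depends_on('compositeproto@0.4:', type='build')     # COMPOSITEPROTO >= 0.4
    depends_on('recordproto@1.13.99.1:', type='build')  # RECORDPROTO >= 1.13.99.1
    depends_on('presentproto@1.0:', type='build')       # PRESENTPROTO >= 1.0

These lines would sit alongside the existing depends_on calls in the class body.
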
diff --git a/var/spack/repos/builtin/packages/xorg-sgml-doctools/package.py b/var/spack/repos/builtin/packages/xorg-sgml-doctools/package.py
new file mode 100644
index 0000000000..c2e5797efa
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xorg-sgml-doctools/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class XorgSgmlDoctools(Package):
+ """This package provides a common set of SGML entities and XML/CSS style
+ sheets used in building/formatting the documentation provided in other
+ X.Org packages."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/doc/xorg-sgml-doctools"
+ url = "https://www.x.org/archive/individual/doc/xorg-sgml-doctools-1.11.tar.gz"
+
+ version('1.11', '51cf4c6b476e2b98a068fea6975b9b21')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xphelloworld/package.py b/var/spack/repos/builtin/packages/xphelloworld/package.py
new file mode 100644
index 0000000000..6d445d69be
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xphelloworld/package.py
@@ -0,0 +1,54 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xphelloworld(Package):
+ """Xprint sample applications."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xphelloworld"
+ url = "https://www.x.org/archive/individual/app/xphelloworld-1.0.1.tar.gz"
+
+ version('1.0.1', 'b1851337a8e850d5c8e5a5ca5e3033da')
+
+ depends_on('libx11')
+ depends_on('libxaw')
+ depends_on('libxprintapputil')
+ depends_on('libxprintutil')
+ depends_on('libxp')
+ depends_on('libxt')
+
+ # FIXME: xphelloworld requires libxaw8, but libxaw only provides 6 and 7.
+ # It looks like xprint support was removed from libxaw at some point.
+ # But even the oldest version of libxaw doesn't build libxaw8.
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xplsprinters/package.py b/var/spack/repos/builtin/packages/xplsprinters/package.py
new file mode 100644
index 0000000000..55de272a33
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xplsprinters/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xplsprinters(Package):
+ """List Xprint printers."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xplsprinters"
+ url = "https://www.x.org/archive/individual/app/xplsprinters-1.0.1.tar.gz"
+
+ version('1.0.1', '8e5698b5a2a2a0fc78caeb23909dd284')
+
+ depends_on('libxp')
+ depends_on('libxprintutil')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xpr/package.py b/var/spack/repos/builtin/packages/xpr/package.py
new file mode 100644
index 0000000000..669693e084
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xpr/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xpr(Package):
+ """xpr takes as input a window dump file produced by xwd
+ and formats it for output on various types of printers."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xpr"
+ url = "https://www.x.org/archive/individual/app/xpr-1.0.4.tar.gz"
+
+ version('1.0.4', '6adfa60f458474c0c226454c233fc32f')
+
+ depends_on('libxmu')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xprehashprinterlist/package.py b/var/spack/repos/builtin/packages/xprehashprinterlist/package.py
new file mode 100644
index 0000000000..4578c3c191
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xprehashprinterlist/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xprehashprinterlist(Package):
+ """Rehash list of Xprint printers."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xprehashprinterlist"
+ url = "https://www.x.org/archive/individual/app/xprehashprinterlist-1.0.1.tar.gz"
+
+ version('1.0.1', '395578955634e4b2daa5b78f6fa9222c')
+
+ depends_on('libxp')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xprop/package.py b/var/spack/repos/builtin/packages/xprop/package.py
new file mode 100644
index 0000000000..0e1a591bcb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xprop/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xprop(Package):
+ """xprop is a command line tool to display and/or set window and font
+ properties of an X server."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xprop"
+ url = "https://www.x.org/archive/individual/app/xprop-1.2.2.tar.gz"
+
+ version('1.2.2', 'db03a6bcf7b0d0c2e691ea3083277cbc')
+
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xproto/package.py b/var/spack/repos/builtin/packages/xproto/package.py
new file mode 100644
index 0000000000..67074a6993
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xproto/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xproto(Package):
+ """X Window System Core Protocol.
+
+ This package provides the headers and specification documents defining
+ the X Window System Core Protocol, Version 11.
+
+ It also includes a number of headers that aren't purely protocol related,
+ but are depended upon by many other X Window System packages to provide
+ common definitions and a porting layer."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/x11proto"
+ url = "https://www.x.org/archive/individual/proto/xproto-7.0.29.tar.gz"
+
+ version('7.0.29', '16a78dd2c5ad73011105c96235f6a0af')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xproxymanagementprotocol/package.py b/var/spack/repos/builtin/packages/xproxymanagementprotocol/package.py
new file mode 100644
index 0000000000..e5bfcb8cbc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xproxymanagementprotocol/package.py
@@ -0,0 +1,41 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xproxymanagementprotocol(Package):
+ """The Proxy Management Protocol is an ICE based protocol that provides a
+ way for application servers to easily locate proxy services available to
+ them."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/proto/pmproto"
+ url = "https://www.x.org/archive/individual/proto/xproxymanagementprotocol-1.0.3.tar.gz"
+
+ version('1.0.3', 'c4ab05a6174b4e9b6ae5b7cfbb6d718e')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xrandr/package.py b/var/spack/repos/builtin/packages/xrandr/package.py
new file mode 100644
index 0000000000..35e21c6047
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xrandr/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xrandr(Package):
+ """xrandr - primitive command line interface to X11 Resize, Rotate, and
+ Reflect (RandR) extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xrandr"
+ url = "https://www.x.org/archive/individual/app/xrandr-1.5.0.tar.gz"
+
+ version('1.5.0', 'fe9cf76033fe5d973131eac67b6a3118')
+
+ depends_on('libxrandr@1.5:')
+ depends_on('libxrender')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xrdb/package.py b/var/spack/repos/builtin/packages/xrdb/package.py
new file mode 100644
index 0000000000..93847a19a0
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xrdb/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xrdb(Package):
+ """xrdb - X server resource database utility."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xrdb"
+ url = "https://www.x.org/archive/individual/app/xrdb-1.1.0.tar.gz"
+
+ version('1.1.0', 'd48983e561ef8b4b2e245feb584c11ce')
+
+ depends_on('libxmu')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xrefresh/package.py b/var/spack/repos/builtin/packages/xrefresh/package.py
new file mode 100644
index 0000000000..f99810beea
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xrefresh/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xrefresh(Package):
+ """xrefresh - refresh all or part of an X screen."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xrefresh"
+ url = "https://www.x.org/archive/individual/app/xrefresh-1.0.5.tar.gz"
+
+ version('1.0.5', 'e41c5148d894406484af59887257c465')
+
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xrootd/package.py b/var/spack/repos/builtin/packages/xrootd/package.py
new file mode 100644
index 0000000000..db076dbc56
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xrootd/package.py
@@ -0,0 +1,52 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+
+from spack import *
+
+
+class Xrootd(Package):
+ """The XROOTD project aims at giving high-performance, scalable,
+ fault-tolerant access to data repositories of many kinds."""
+ homepage = "http://xrootd.org"
+ url = "http://xrootd.org/download/v4.3.0/xrootd-4.3.0.tar.gz"
+
+ version('4.3.0', '39c2fab9f632f35e12ff607ccaf9e16c')
+
+ depends_on('cmake', type='build')
+
+ def install(self, spec, prefix):
+ options = []
+ options.extend(std_cmake_args)
+
+ build_directory = join_path(self.stage.path, 'spack-build')
+ source_directory = self.stage.source_path
+
+ if '+debug' in spec:
+ options.append('-DCMAKE_BUILD_TYPE:STRING=Debug')
+
+ with working_dir(build_directory, create=True):
+ cmake(source_directory, *options)
+ make()
+ make("install")
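Editorial note on the xrootd recipe above: install() checks '+debug' in spec, but the package as committed declares no debug variant, so the Debug build type can never actually be selected. A minimal sketch of the declaration that branch presupposes (hypothetical, mirroring the variant style used elsewhere in this commit):

    # hypothetical addition -- not part of the committed xrootd recipe
    variant('debug', default=False,
            description='Builds a debug version of the library')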
diff --git a/var/spack/repos/builtin/packages/xrx/package.py b/var/spack/repos/builtin/packages/xrx/package.py
new file mode 100644
index 0000000000..4457c2f164
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xrx/package.py
@@ -0,0 +1,57 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xrx(Package):
+ """The remote execution (RX) service specifies a MIME format for invoking
+ applications remotely, for example via a World Wide Web browser. This
+ RX format specifies a syntax for listing network services required by
+ the application, for example an X display server. The requesting Web
+ browser must identify specific instances of the services in the request
+ to invoke the application."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xrx"
+ url = "https://www.x.org/archive/individual/app/xrx-1.0.4.tar.gz"
+
+ version('1.0.4', 'dd4b8bf6eca5fc5be5df30c14050074c')
+
+ depends_on('libx11')
+ depends_on('libxt')
+ depends_on('libxext')
+ depends_on('libxau')
+ depends_on('libice')
+ depends_on('libxaw')
+
+ depends_on('xtrans', type='build')
+ depends_on('xproxymanagementprotocol', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xscope/package.py b/var/spack/repos/builtin/packages/xscope/package.py
new file mode 100644
index 0000000000..cf33c9767e
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xscope/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xscope(Package):
+ """XSCOPE -- a program to monitor X11/Client conversations."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xscope"
+ url = "https://www.x.org/archive/individual/app/xscope-1.4.1.tar.gz"
+
+ version('1.4.1', 'c476fb73b354f4a5c388f3814052ce0d')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('xtrans', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xsdktrilinos/package.py b/var/spack/repos/builtin/packages/xsdktrilinos/package.py
new file mode 100644
index 0000000000..ea49054435
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xsdktrilinos/package.py
@@ -0,0 +1,98 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+
+
+class Xsdktrilinos(CMakePackage):
+ """xSDKTrilinos contains the portions of Trilinos that depend on PETSc
+ because they would cause a circular dependency if built as part of
+ Trilinos.
+ """
+ homepage = "https://trilinos.org/"
+ base_url = "https://github.com/trilinos/xSDKTrilinos/archive"
+
+ version('develop', git='https://github.com/trilinos/xSDKTrilinos.git', tag='master')
+ version('12.8.1', '9cc338ded17d1e10ea6c0dc18b22dcd4')
+ version('12.6.4', '44c4c54ccbac73bb8939f68797b9454a')
+
+ def url_for_version(self, version):
+ return '%s/trilinos-release-%s.tar.gz' % \
+ (Xsdktrilinos.base_url, version.dashed)
+
+ variant('hypre', default=True,
+ description='Compile with Hypre preconditioner')
+ variant('petsc', default=True,
+ description='Compile with PETSc solvers')
+ variant('shared', default=True,
+ description='Enables the build of shared libraries')
+ variant('debug', default=False,
+ description='Builds a debug version of the libraries')
+
+ # MPI related dependencies
+ depends_on('mpi')
+ depends_on('hypre~internal-superlu', when='+hypre')
+ depends_on('petsc+mpi~complex', when='+petsc')
+ depends_on('trilinos@12.6.4', when='@12.6.4')
+ depends_on('trilinos@12.8.1', when='@12.8.1')
+ depends_on('trilinos@develop', when='@develop')
+
+ def cmake_args(self):
+ spec = self.spec
+
+ options = []
+
+ mpi_bin = spec['mpi'].prefix.bin
+ options.extend([
+ '-DxSDKTrilinos_VERBOSE_CONFIGURE:BOOL=OFF',
+ '-DxSDKTrilinos_ENABLE_TESTS:BOOL=ON',
+ '-DxSDKTrilinos_ENABLE_EXAMPLES:BOOL=ON',
+ '-DTrilinos_INSTALL_DIR=%s' % spec['trilinos'].prefix,
+ '-DCMAKE_BUILD_TYPE:STRING=%s' % (
+ 'DEBUG' if '+debug' in spec else 'RELEASE'),
+ '-DBUILD_SHARED_LIBS:BOOL=%s' % (
+ 'ON' if '+shared' in spec else 'OFF'),
+ '-DTPL_ENABLE_MPI:BOOL=ON',
+ '-DMPI_BASE_DIR:PATH=%s' % spec['mpi'].prefix,
+ '-DxSDKTrilinos_ENABLE_CXX11:BOOL=ON',
+ '-DTPL_ENABLE_HYPRE:BOOL=%s' % (
+ 'ON' if '+hypre' in spec else 'OFF'),
+ '-DTPL_ENABLE_PETSC:BOOL=%s' % (
+ 'ON' if '+petsc' in spec else 'OFF'),
+ '-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % self.prefix
+ ])
+
+ # Fortran lib
+ if spec.satisfies('%gcc') or spec.satisfies('%clang'):
+ libgfortran = os.path.dirname(os.popen(
+ '%s --print-file-name libgfortran.a' %
+ join_path(mpi_bin, 'mpif90')).read())
+ options.extend([
+ '-DxSDKTrilinos_EXTRA_LINK_FLAGS:STRING=-L%s/ -lgfortran' % (
+ libgfortran),
+ '-DxSDKTrilinos_ENABLE_Fortran=ON'
+ ])
+
+ return options
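The xsdktrilinos cmake_args hook above uses one idiom throughout: every boolean variant is translated into an ON/OFF CMake cache entry, and the CMakePackage base class appends the returned list to the cmake invocation. A minimal sketch of that mapping in isolation (the helper name is illustrative only, not something the recipe defines):

    def bool_cmake_define(spec, define, variant):
        # e.g. bool_cmake_define(spec, 'TPL_ENABLE_HYPRE', 'hypre')
        #      -> '-DTPL_ENABLE_HYPRE:BOOL=ON' when the spec carries +hypre
        value = 'ON' if '+{0}'.format(variant) in spec else 'OFF'
        return '-D{0}:BOOL={1}'.format(define, value)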
diff --git a/var/spack/repos/builtin/packages/xset/package.py b/var/spack/repos/builtin/packages/xset/package.py
new file mode 100644
index 0000000000..462bea8cfe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xset/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xset(Package):
+ """User preference utility for X."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xset"
+ url = "https://www.x.org/archive/individual/app/xset-1.2.3.tar.gz"
+
+ version('1.2.3', '1a76965ed0e8cb51d3fa04d458cb3d8f')
+
+ depends_on('libxmu')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xsetmode/package.py b/var/spack/repos/builtin/packages/xsetmode/package.py
new file mode 100644
index 0000000000..f05fd0f123
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xsetmode/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xsetmode(Package):
+ """Set the mode for an X Input device."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xsetmode"
+ url = "https://www.x.org/archive/individual/app/xsetmode-1.0.0.tar.gz"
+
+ version('1.0.0', '0dc2a917138d0345c00e016ac720e085')
+
+ depends_on('libxi')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xsetpointer/package.py b/var/spack/repos/builtin/packages/xsetpointer/package.py
new file mode 100644
index 0000000000..e9bf2fc9fe
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xsetpointer/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xsetpointer(Package):
+ """Set an X Input device as the main pointer."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xsetpointer"
+ url = "https://www.x.org/archive/individual/app/xsetpointer-1.0.1.tar.gz"
+
+ version('1.0.1', 'bb206b6875f2428c2281e1165b6c7f88')
+
+ depends_on('libxi')
+ depends_on('libx11')
+
+ depends_on('inputproto@1.4:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xsetroot/package.py b/var/spack/repos/builtin/packages/xsetroot/package.py
new file mode 100644
index 0000000000..3e62d41e9b
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xsetroot/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xsetroot(Package):
+ """xsetroot - root window parameter setting utility for X."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xsetroot"
+ url = "https://www.x.org/archive/individual/app/xsetroot-1.1.1.tar.gz"
+
+ version('1.1.1', '8c794914a2d0456317288c41451dbee3')
+
+ depends_on('libxmu')
+ depends_on('libx11')
+ depends_on('libxcursor')
+
+ depends_on('xbitmaps', type='build')
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xsm/package.py b/var/spack/repos/builtin/packages/xsm/package.py
new file mode 100644
index 0000000000..9d9c896365
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xsm/package.py
@@ -0,0 +1,49 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xsm(Package):
+ """X Session Manager."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xsm"
+ url = "https://www.x.org/archive/individual/app/xsm-1.0.3.tar.gz"
+
+ version('1.0.3', '60a2e5987d8e49a568599ba8fe59c8db')
+
+ depends_on('libx11')
+ depends_on('libxt@1.1.0:')
+ depends_on('libice')
+ depends_on('libsm')
+ depends_on('libxaw')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xstdcmap/package.py b/var/spack/repos/builtin/packages/xstdcmap/package.py
new file mode 100644
index 0000000000..bb19bdff1a
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xstdcmap/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xstdcmap(Package):
+ """The xstdcmap utility can be used to selectively define standard colormap
+ properties. It is intended to be run from a user's X startup script to
+ create standard colormap definitions in order to facilitate sharing of
+ scarce colormap resources among clients using PseudoColor visuals."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xstdcmap"
+ url = "https://www.x.org/archive/individual/app/xstdcmap-1.0.3.tar.gz"
+
+ version('1.0.3', '70c1fd18b79c3ea1dae136e2eabe1c82')
+
+ depends_on('libxmu')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xtrans/package.py b/var/spack/repos/builtin/packages/xtrans/package.py
new file mode 100644
index 0000000000..ed46059d9d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xtrans/package.py
@@ -0,0 +1,45 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xtrans(Package):
+ """xtrans is a library of code that is shared among various X packages to
+ handle network protocol transport in a modular fashion, allowing a
+ single place to add new transport types. It is used by the X server,
+ libX11, libICE, the X font server, and related components."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/lib/libxtrans"
+ url = "https://www.x.org/archive//individual/lib/xtrans-1.3.5.tar.gz"
+
+ version('1.3.5', '6e4eac1b7c6591da0753052e1eccfb58')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xtrap/package.py b/var/spack/repos/builtin/packages/xtrap/package.py
new file mode 100644
index 0000000000..405ec2f848
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xtrap/package.py
@@ -0,0 +1,46 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xtrap(Package):
+ """XTrap sample clients."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xtrap"
+ url = "https://www.x.org/archive/individual/app/xtrap-1.0.2.tar.gz"
+
+ version('1.0.2', '601e4945535d2d25eb1bc640332e2363')
+
+ depends_on('libx11')
+ depends_on('libxtrap')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xts/package.py b/var/spack/repos/builtin/packages/xts/package.py
new file mode 100644
index 0000000000..c3993cf391
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xts/package.py
@@ -0,0 +1,60 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xts(Package):
+ """This is a revamped version of X Test Suite (XTS) which removes some of
+ the ugliness of building and running the tests."""
+
+ homepage = "https://www.x.org/wiki/XorgTesting/"
+ url = "https://www.x.org/archive/individual/test/xts-0.99.1.tar.gz"
+
+ version('0.99.1', '1e5443fede389be606f3745a71483bac')
+
+ depends_on('libx11')
+ depends_on('libxext')
+ depends_on('libxi')
+ depends_on('libxtst')
+ depends_on('libxau')
+ depends_on('libxt')
+ depends_on('libxmu')
+ depends_on('libxaw')
+
+ depends_on('xtrans', type='build')
+ depends_on('bdftopcf', type='build')
+ depends_on('mkfontdir', type='build')
+ depends_on('perl', type='build')
+ depends_on('xset', type='build')
+ depends_on('xdpyinfo', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ # FIXME: Crashes during compilation
+ # error: redeclaration of enumerator 'XawChainTop'
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xvidtune/package.py b/var/spack/repos/builtin/packages/xvidtune/package.py
new file mode 100644
index 0000000000..ac5352df5f
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xvidtune/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xvidtune(Package):
+ """xvidtune is a client interface to the X server video mode
+ extension (XFree86-VidModeExtension)."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xvidtune"
+ url = "https://www.x.org/archive/individual/app/xvidtune-1.0.3.tar.gz"
+
+ version('1.0.3', 'e0c31d78741ae4aab2f4bfcc2abd4a3d')
+
+ depends_on('libxxf86vm')
+ depends_on('libxt')
+ depends_on('libxaw')
+ depends_on('libxmu')
+ depends_on('libx11')
+
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xvinfo/package.py b/var/spack/repos/builtin/packages/xvinfo/package.py
new file mode 100644
index 0000000000..359f1f23de
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xvinfo/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xvinfo(Package):
+ """xvinfo prints out the capabilities of any video adaptors associated
+ with the display that are accessible through the X-Video extension."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xvinfo"
+ url = "https://www.x.org/archive/individual/app/xvinfo-1.1.3.tar.gz"
+
+ version('1.1.3', '6890a19226c07344ae12e7a2ef12f2c6')
+
+ depends_on('libxv')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.25:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xwd/package.py b/var/spack/repos/builtin/packages/xwd/package.py
new file mode 100644
index 0000000000..d1f9ee1dfb
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xwd/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xwd(Package):
+ """xwd - dump an image of an X window."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xwd"
+ url = "https://www.x.org/archive/individual/app/xwd-1.0.6.tar.gz"
+
+ version('1.0.6', 'd6c132f5f00188ce2a1393f12bd34ad4')
+
+ depends_on('libx11')
+ depends_on('libxkbfile')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xwininfo/package.py b/var/spack/repos/builtin/packages/xwininfo/package.py
new file mode 100644
index 0000000000..bba97ca671
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xwininfo/package.py
@@ -0,0 +1,48 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xwininfo(Package):
+ """xwininfo prints information about windows on an X server. Various
+ information is displayed depending on which options are selected."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xwininfo"
+ url = "https://www.x.org/archive/individual/app/xwininfo-1.1.3.tar.gz"
+
+ version('1.1.3', 'd26623fe240659a320367bc453f1d301')
+
+ depends_on('libxcb@1.6:')
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xwud/package.py b/var/spack/repos/builtin/packages/xwud/package.py
new file mode 100644
index 0000000000..a30d55b7c6
--- /dev/null
+++ b/var/spack/repos/builtin/packages/xwud/package.py
@@ -0,0 +1,47 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class Xwud(Package):
+ """xwud allows X users to display in a window an image saved in a
+ specially formatted dump file, such as one produced by xwd."""
+
+ homepage = "http://cgit.freedesktop.org/xorg/app/xwud"
+ url = "https://www.x.org/archive/individual/app/xwud-1.0.4.tar.gz"
+
+ version('1.0.4', 'bb44485a37496f0121e5843bcf5bb01b')
+
+ depends_on('libx11')
+
+ depends_on('xproto@7.0.17:', type='build')
+ depends_on('pkg-config@0.9.0:', type='build')
+ depends_on('util-macros', type='build')
+
+ def install(self, spec, prefix):
+ configure('--prefix={0}'.format(prefix))
+
+ make()
+ make('install')
diff --git a/var/spack/repos/builtin/packages/xz/package.py b/var/spack/repos/builtin/packages/xz/package.py
index b3ef7808aa..8b0609f50e 100644
--- a/var/spack/repos/builtin/packages/xz/package.py
+++ b/var/spack/repos/builtin/packages/xz/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Xz(Package):
"""XZ Utils is free general-purpose data compression software with
high compression ratio. XZ Utils were written for POSIX-like
@@ -36,7 +37,9 @@ class Xz(Package):
version('5.2.2', 'f90c9a0c8b259aee2234c4e0d7fd70af')
def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
- make()
- make("install")
+ configure('--prefix={0}'.format(prefix))
+ make()
+ if self.run_tests:
+ make('check')
+ make('install')
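The reworked xz install() guards the upstream test target behind self.run_tests, which Spack sets only when build-time tests are requested at install time; a plain install skips it. The guard on its own, as a short sketch:

    if self.run_tests:
        # runs the upstream 'check' target only when install-time tests were requested
        make('check')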
diff --git a/var/spack/repos/builtin/packages/yaml-cpp/package.py b/var/spack/repos/builtin/packages/yaml-cpp/package.py
new file mode 100644
index 0000000000..0ced7c89fc
--- /dev/null
+++ b/var/spack/repos/builtin/packages/yaml-cpp/package.py
@@ -0,0 +1,50 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+
+
+class YamlCpp(CMakePackage):
+ """A YAML parser and emitter in C++"""
+
+ homepage = "https://github.com/jbeder/yaml-cpp"
+ url = "https://github.com/jbeder/yaml-cpp/archive/yaml-cpp-0.5.3.tar.gz"
+
+ version('0.5.3', '4e47733d98266e46a1a73ae0a72954eb')
+
+ variant('fpic', default=False,
+ description='Build with position independent code')
+
+ depends_on('boost', when='@:0.5.3')
+
+ def cmake_args(self):
+ spec = self.spec
+ options = []
+
+ if '+fpic' in spec:
+ options.extend([
+ '-DCMAKE_POSITION_INDEPENDENT_CODE:BOOL=true'
+ ])
+
+ return options
diff --git a/var/spack/repos/builtin/packages/yasm/package.py b/var/spack/repos/builtin/packages/yasm/package.py
index e05160c8ea..f14bdbcee7 100644
--- a/var/spack/repos/builtin/packages/yasm/package.py
+++ b/var/spack/repos/builtin/packages/yasm/package.py
@@ -24,6 +24,7 @@
##############################################################################
from spack import *
+
class Yasm(Package):
"""Yasm is a complete rewrite of the NASM-2.11.06 assembler. It
supports the x86 and AMD64 instruction sets, accepts NASM and
diff --git a/var/spack/repos/builtin/packages/yorick/package.py b/var/spack/repos/builtin/packages/yorick/package.py
new file mode 100644
index 0000000000..52a4d8787d
--- /dev/null
+++ b/var/spack/repos/builtin/packages/yorick/package.py
@@ -0,0 +1,81 @@
+##############################################################################
+# Copyright (c) 2013-2016, Lawrence Livermore National Security, LLC.
+# Produced at the Lawrence Livermore National Laboratory.
+#
+# This file is part of Spack.
+# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
+# LLNL-CODE-647188
+#
+# For details, see https://github.com/llnl/spack
+# Please also see the LICENSE file for our notice and the LGPL.
+#
+# This program is free software; you can redistribute it and/or modify
+# it under the terms of the GNU Lesser General Public License (as
+# published by the Free Software Foundation) version 2.1, February 1999.
+#
+# This program is distributed in the hope that it will be useful, but
+# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
+# conditions of the GNU Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this program; if not, write to the Free Software
+# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
+##############################################################################
+from spack import *
+import os
+import shutil
+import glob
+
+
+class Yorick(Package):
+ """Yorick is an interpreted programming language for scientific simulations
+ or calculations, postprocessing or steering large simulation codes,
+ interactive scientific graphics, and reading, writing, or translating
+ files of numbers. Yorick includes an interactive graphics package, and a
+ binary file package capable of translating to and from the raw numeric
+ formats of all modern computers. Yorick is written in ANSI C and runs on
+ most operating systems (\*nix systems, MacOS X, Windows).
+ """
+
+ homepage = "http://dhmunro.github.io/yorick-doc/"
+ url = "https://github.com/dhmunro/yorick/archive/y_2_2_04.tar.gz"
+
+ version('2.2.04', md5='1b5b0da6ad81b2d9dba64d991ec17939')
+ version('master', branch='master',
+ git='https://github.com/dhmunro/yorick.git')
+ version('f90-plugin', branch='f90-plugin',
+ git='https://github.com/trmwzm/yorick.git')
+
+ variant('X', default=False, description='Enable X11 support')
+
+ depends_on('libx11', when='+X')
+
+ def install(self, spec, prefix):
+ os.environ['FORTRAN_LINKAGE'] = '-Df_linkage'
+
+ make("config")
+
+ filter_file(r'^CC.+',
+ 'CC={0}'.format(self.compiler.cc),
+ 'Make.cfg')
+ filter_file(r'^FC.+',
+ 'FC={0}'.format(self.compiler.fc),
+ 'Make.cfg')
+ filter_file(r'^COPT_DEFAULT.+',
+ 'COPT_DEFAULT=-O3',
+ 'Make.cfg')
+
+ make()
+ make("install")
+
+ try:
+ os.makedirs(prefix)
+ except OSError:
+ pass
+ os.chdir("relocate")
+ for f in glob.glob('*'):
+ if os.path.isdir(f):
+ shutil.copytree(f, os.path.join(prefix, f))
+ else:
+ shutil.copy2(f, os.path.join(prefix, f))
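The yorick recipe above leans on filter_file, Spack's in-place, sed-like helper: each call rewrites every line of the named file that matches the regular expression. As a sketch of what the first call in install() does (same arguments as the recipe uses):

    # Rewrites any 'CC...' line in Make.cfg to point at Spack's C compiler wrapper.
    filter_file(r'^CC.+', 'CC={0}'.format(self.compiler.cc), 'Make.cfg')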
diff --git a/var/spack/repos/builtin/packages/zeromq/package.py b/var/spack/repos/builtin/packages/zeromq/package.py
index 9bdd5861e0..cafd3c2125 100644
--- a/var/spack/repos/builtin/packages/zeromq/package.py
+++ b/var/spack/repos/builtin/packages/zeromq/package.py
@@ -24,11 +24,13 @@
##############################################################################
from spack import *
+
class Zeromq(Package):
""" The ZMQ networking/concurrency library and core API """
homepage = "http://zguide.zeromq.org/"
url = "http://download.zeromq.org/zeromq-4.1.2.tar.gz"
+ version('4.1.4', 'a611ecc93fffeb6d058c0e6edf4ad4fb')
version('4.1.2', '159c0c56a895472f02668e692d122685')
version('4.1.1', '0a4b44aa085644f25c177f79dc13f253')
version('4.0.7', '9b46f7e7b0704b83638ef0d461fd59ab')
@@ -36,9 +38,10 @@ class Zeromq(Package):
version('4.0.5', '73c39f5eb01b9d7eaf74a5d899f1d03d')
depends_on("libsodium")
+ depends_on("libsodium@:1.0.3", when='@:4.1.2')
def install(self, spec, prefix):
- configure("--with-libsodium","--prefix=%s" % prefix)
+ configure("--with-libsodium", "--prefix=%s" % prefix)
make()
make("install")
diff --git a/var/spack/repos/builtin/packages/zfp/package.py b/var/spack/repos/builtin/packages/zfp/package.py
index 878b65118f..a898ab03d3 100644
--- a/var/spack/repos/builtin/packages/zfp/package.py
+++ b/var/spack/repos/builtin/packages/zfp/package.py
@@ -24,12 +24,15 @@
##############################################################################
from spack import *
+
class Zfp(Package):
- """zfp is an open source C library for compressed floating-point arrays that supports
- very high throughput read and write random acces, target error bounds or bit rates.
- Although bit-for-bit lossless compression is not always possible, zfp is usually
- accurate to within machine epsilon in near-lossless mode, and is often orders of
- magnitude more accurate than other lossy compressors.
+ """zfp is an open source C library for compressed floating-point arrays
+ that supports very high throughput read and write random access,
+ target error bounds or bit rates. Although bit-for-bit lossless
+ compression is not always possible, zfp is usually accurate to
+ within machine epsilon in near-lossless mode, and is often orders
+ of magnitude more accurate than other lossy compressors.
+
"""
homepage = "http://computation.llnl.gov/projects/floating-point-compression"
diff --git a/var/spack/repos/builtin/packages/zlib/package.py b/var/spack/repos/builtin/packages/zlib/package.py
index e1cbdc7e28..ea758e0188 100644
--- a/var/spack/repos/builtin/packages/zlib/package.py
+++ b/var/spack/repos/builtin/packages/zlib/package.py
@@ -24,19 +24,23 @@
##############################################################################
from spack import *
-class Zlib(Package):
- """zlib is designed to be a free, general-purpose, legally unencumbered --
- that is, not covered by any patents -- lossless data-compression library for
- use on virtually any computer hardware and operating system.
- """
+
+class Zlib(AutotoolsPackage):
+ """A free, general-purpose, legally unencumbered lossless
+ data-compression library."""
homepage = "http://zlib.net"
- url = "http://zlib.net/zlib-1.2.8.tar.gz"
+ url = "http://zlib.net/fossils/zlib-1.2.10.tar.gz"
+ version('1.2.10', 'd9794246f853d15ce0fcbf79b9a3cf13')
+ # author had this to say about 1.2.9....
+ # Due to the bug fixes, any installations of 1.2.9 should be immediately
+ # replaced with 1.2.10.
version('1.2.8', '44d667c142d7cda120332623eab69f40')
- def install(self, spec, prefix):
- configure("--prefix=%s" % prefix)
+ variant('pic', default=True,
+ description='Produce position-independent code (for shared libs)')
- make()
- make("install")
+ def setup_environment(self, spack_env, run_env):
+ if '+pic' in self.spec:
+ spack_env.set('CFLAGS', self.compiler.pic_flag)
diff --git a/var/spack/repos/builtin/packages/zoltan/package.py b/var/spack/repos/builtin/packages/zoltan/package.py
index 841ff3f4a2..8d4dd321b0 100644
--- a/var/spack/repos/builtin/packages/zoltan/package.py
+++ b/var/spack/repos/builtin/packages/zoltan/package.py
@@ -22,16 +22,23 @@
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
-import re, os, glob
+
from spack import *
+import re
+import os
+import glob
+
class Zoltan(Package):
- """The Zoltan library is a toolkit of parallel combinatorial algorithms for
- parallel, unstructured, and/or adaptive scientific applications. Zoltan's
- largest component is a suite of dynamic load-balancing and paritioning
- algorithms that increase applications' parallel performance by reducing
- idle time. Zoltan also has graph coloring and graph ordering algorithms,
- which are useful in task schedulers and parallel preconditioners."""
+ """The Zoltan library is a toolkit of parallel combinatorial algorithms
+ for parallel, unstructured, and/or adaptive scientific
+ applications. Zoltan's largest component is a suite of dynamic
+ load-balancing and partitioning algorithms that increase
+ applications' parallel performance by reducing idle time. Zoltan
+ also has graph coloring and graph ordering algorithms, which are
+ useful in task schedulers and parallel preconditioners.
+
+ """
homepage = "http://www.cs.sandia.gov/zoltan"
base_url = "http://www.cs.sandia.gov/~kddevin/Zoltan_Distributions"
@@ -41,18 +48,28 @@ class Zoltan(Package):
version('3.6', '9cce794f7241ecd8dbea36c3d7a880f9')
version('3.3', '5eb8f00bda634b25ceefa0122bd18d65')
- variant('debug', default=False, description='Builds a debug version of the library')
- variant('shared', default=True, description='Builds a shared version of the library')
+ variant('debug', default=False, description='Builds a debug version of the library.')
+ variant('shared', default=True, description='Builds a shared version of the library.')
- variant('fortran', default=True, description='Enable Fortran support')
- variant('mpi', default=False, description='Enable MPI support')
+ variant('fortran', default=True, description='Enable Fortran support.')
+ variant('mpi', default=True, description='Enable MPI support.')
depends_on('mpi', when='+mpi')
+ def url_for_version(self, version):
+ return '%s/zoltan_distrib_v%s.tar.gz' % (Zoltan.base_url, version)
+
def install(self, spec, prefix):
+ # FIXME: The older Zoltan versions fail to compile the F90 MPI wrappers
+ # because of some complicated generic type problem.
+ if spec.satisfies('@:3.6+fortran+mpi'):
+ raise RuntimeError(('Cannot build Zoltan v{0} with +fortran and '
+ '+mpi; please disable one of these features '
+ 'or upgrade versions.').format(self.version))
+
config_args = [
- '--enable-f90interface' if '+fortan' in spec else '--disable-f90interface',
- '--enable-mpi' if '+mpi' in spec else '--disable-mpi',
+ self.get_config_flag('f90interface', 'fortran'),
+ self.get_config_flag('mpi', 'mpi'),
]
config_cflags = [
'-O0' if '+debug' in spec else '-O3',
@@ -60,46 +77,70 @@ class Zoltan(Package):
]
if '+shared' in spec:
- config_args.append('--with-ar=$(CXX) -shared $(LDFLAGS) -o')
config_args.append('RANLIB=echo')
+ config_args.append('--with-ar=$(CXX) -shared $(LDFLAGS) -o')
config_cflags.append('-fPIC')
+ if spec.satisfies('%gcc'):
+ config_args.append('--with-libs={0}'.format('-lgfortran'))
if '+mpi' in spec:
- config_args.append('CC=%s/mpicc' % spec['mpi'].prefix.bin)
- config_args.append('CXX=%s/mpicxx' % spec['mpi'].prefix.bin)
- config_args.append('--with-mpi=%s' % spec['mpi'].prefix)
- config_args.append('--with-mpi-compilers=%s' % spec['mpi'].prefix.bin)
+ config_args.append('CC={0}'.format(spec['mpi'].mpicc))
+ config_args.append('CXX={0}'.format(spec['mpi'].mpicxx))
+ config_args.append('FC={0}'.format(spec['mpi'].mpifc))
+
+ mpi_libs = ' -l'.join(self.get_mpi_libs())
+ config_args.append('--with-mpi={0}'.format(spec['mpi'].prefix))
+ config_args.append('--with-mpi-libs=-l{0}'.format(mpi_libs))
# NOTE: Early versions of Zoltan come packaged with a few embedded
# library packages (e.g. ParMETIS, Scotch), which messes with Spack's
# ability to descend directly into the package's source directory.
+ source_directory = self.stage.source_path
if spec.satisfies('@:3.6'):
- cd('Zoltan_v%s' % self.version)
-
- mkdirp('build')
- cd('build')
-
- config_zoltan = Executable('../configure')
- config_zoltan(
- '--prefix=%s' % pwd(),
- '--with-cflags=%s' % ' '.join(config_cflags),
- '--with-cxxflags=%s' % ' '.join(config_cflags),
- *config_args)
-
- make()
- make('install')
-
- # NOTE: Unfortunately, Zoltan doesn't provide any configuration options for
- # the extension of the output library files, so this script must change these
- # extensions as a post-processing step.
+ zoltan_directory = 'Zoltan_v{0}'.format(self.version)
+ source_directory = join_path(source_directory, zoltan_directory)
+
+ build_directory = join_path(source_directory, 'build')
+ with working_dir(build_directory, create=True):
+ config = Executable(join_path(source_directory, 'configure'))
+ config(
+ '--prefix={0}'.format(prefix),
+ '--with-cflags={0}'.format(' '.join(config_cflags)),
+ '--with-cxxflags={0}'.format(' '.join(config_cflags)),
+ '--with-fcflags={0}'.format(' '.join(config_cflags)),
+ *config_args
+ )
+
+ # NOTE: Earlier versions of Zoltan cannot be built in parallel
+ # because they contain nested Makefile dependency bugs.
+ make(parallel=not spec.satisfies('@:3.6+fortran'))
+ make('install')
+
+ # NOTE: Unfortunately, Zoltan doesn't provide any configuration
+ # options for the extension of the output library files, so this
+ # script must change these extensions as a post-processing step.
if '+shared' in spec:
- for libpath in glob.glob('lib/*.a'):
- libdir, libname = (os.path.dirname(libpath), os.path.basename(libpath))
- move(libpath, os.path.join(libdir, re.sub(r'\.a$', '.so', libname)))
-
- mkdirp(prefix)
- move('include', prefix)
- move('lib', prefix)
-
- def url_for_version(self, version):
- return '%s/zoltan_distrib_v%s.tar.gz' % (Zoltan.base_url, version)
+ for lib_path in glob.glob(join_path(prefix, 'lib', '*.a')):
+ lib_static_name = os.path.basename(lib_path)
+ lib_shared_name = re.sub(r'\.a$', '.{0}'.format(dso_suffix),
+ lib_static_name)
+ move(lib_path, join_path(prefix, 'lib', lib_shared_name))
+
+ def get_config_flag(self, flag_name, flag_variant):
+ flag_pre = 'en' if '+{0}'.format(flag_variant) in self.spec else 'dis'
+ return '--{0}able-{1}'.format(flag_pre, flag_name)
+
+ # NOTE: Zoltan assumes that it's linking against an MPI library that can
+ # be found with '-lmpi,' which isn't the case for many MPI packages. This
+ # function finds the names of the actual libraries for Zoltan's MPI dep.
+ def get_mpi_libs(self):
+ mpi_libs = set()
+
+ for lib_path in glob.glob(join_path(self.spec['mpi'].prefix.lib, '*')):
+ mpi_lib_match = re.match(
+ r'^(lib)((\w*)mpi(\w*))\.((a)|({0}))$'.format(dso_suffix),
+ os.path.basename(lib_path))
+ if mpi_lib_match:
+ mpi_libs.add(mpi_lib_match.group(2))
+
+ return list(mpi_libs)
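The new get_config_flag helper in the Zoltan recipe condenses the old hand-written '--enable-.../--disable-...' pairs into a single variant-driven mapping. A short sketch of the behaviour it implements (the example specs in the comments are illustrative):

    def get_config_flag(self, flag_name, flag_variant):
        # spec 'zoltan+mpi'      -> '--enable-mpi'
        # spec 'zoltan~fortran'  -> '--disable-f90interface'
        flag_pre = 'en' if '+{0}'.format(flag_variant) in self.spec else 'dis'
        return '--{0}able-{1}'.format(flag_pre, flag_name)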
diff --git a/var/spack/repos/builtin/packages/zsh/package.py b/var/spack/repos/builtin/packages/zsh/package.py
index 2c9ed4c6e7..a70d307be9 100644
--- a/var/spack/repos/builtin/packages/zsh/package.py
+++ b/var/spack/repos/builtin/packages/zsh/package.py
@@ -24,12 +24,13 @@
##############################################################################
from spack import *
+
class Zsh(Package):
+ """Zsh is a shell designed for interactive use, although it is also a
+ powerful scripting language. Many of the useful features of bash, ksh, and
+ tcsh were incorporated into zsh; many original features were added.
"""
- Zsh is a shell designed for interactive use, although it is also a powerful
- scripting language. Many of the useful features of bash, ksh, and tcsh were
- incorporated into zsh; many original features were added.
- """
+
homepage = "http://www.zsh.org"
url = "http://downloads.sourceforge.net/project/zsh/zsh/5.1.1/zsh-5.1.1.tar.gz"